From e183c1d4732c605130aaa6ac3dcbb18ca51880f9 Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Fri, 27 Sep 2024 07:33:10 -0700 Subject: [PATCH 01/36] chore: update CDK version following release (#14152) Co-authored-by: maxi297 <3360483+maxi297@users.noreply.github.com> --- airbyte-connector-builder-resources/CDK_VERSION | 2 +- airbyte-connector-builder-server/Dockerfile | 2 +- airbyte-connector-builder-server/requirements.in | 2 +- airbyte-connector-builder-server/requirements.txt | 4 ++-- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/airbyte-connector-builder-resources/CDK_VERSION b/airbyte-connector-builder-resources/CDK_VERSION index feb96e1d251..6ab7204dabc 100644 --- a/airbyte-connector-builder-resources/CDK_VERSION +++ b/airbyte-connector-builder-resources/CDK_VERSION @@ -1 +1 @@ -5.7.3 +5.7.5 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile index 1c42a5f0759..18f7aceeacb 100644 --- a/airbyte-connector-builder-server/Dockerfile +++ b/airbyte-connector-builder-server/Dockerfile @@ -2,7 +2,7 @@ ARG JAVA_PYTHON_BASE_IMAGE_VERSION=2.1.7 FROM airbyte/airbyte-base-java-python-image:${JAVA_PYTHON_BASE_IMAGE_VERSION} AS connector-builder-server # Set up CDK requirements -ARG CDK_VERSION=5.7.3 +ARG CDK_VERSION=5.7.5 ENV CDK_PYTHON=${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/python ENV CDK_ENTRYPOINT ${PYENV_ROOT}/versions/${PYTHON_VERSION}/lib/python3.10/site-packages/airbyte_cdk/connector_builder/main.py # Set up CDK diff --git a/airbyte-connector-builder-server/requirements.in b/airbyte-connector-builder-server/requirements.in index 445976d846e..c52a6bbb432 100644 --- a/airbyte-connector-builder-server/requirements.in +++ b/airbyte-connector-builder-server/requirements.in @@ -1 +1 @@ -airbyte-cdk==5.7.3 +airbyte-cdk==5.7.5 diff --git a/airbyte-connector-builder-server/requirements.txt 
b/airbyte-connector-builder-server/requirements.txt index 5b4727d436b..146dfa5da90 100644 --- a/airbyte-connector-builder-server/requirements.txt +++ b/airbyte-connector-builder-server/requirements.txt @@ -4,7 +4,7 @@ # # pip-compile # -airbyte-cdk==5.7.3 +airbyte-cdk==5.7.5 # via -r requirements.in airbyte-protocol-models-dataclasses==0.13.0 # via airbyte-cdk @@ -77,7 +77,7 @@ jsonschema==3.2.0 # via airbyte-cdk langchain-core==0.1.42 # via airbyte-cdk -langsmith==0.1.128 +langsmith==0.1.129 # via langchain-core markupsafe==2.1.5 # via jinja2 From b26fc6a980d7c0c8b2385785a6ea9351f6079d2a Mon Sep 17 00:00:00 2001 From: Vladimir Date: Fri, 27 Sep 2024 17:35:02 +0300 Subject: [PATCH 02/36] feat: [SyncCatalog V2] sticky namespace rows (#13822) --- .../SyncCatalogTable.module.scss | 13 +-- .../SyncCatalogTable/SyncCatalogTable.tsx | 102 +++++++++++++----- .../components/HeaderNamespaceCell.tsx | 100 ----------------- .../hooks/useNamespaceRowInView.tsx | 39 +++++++ .../ConnectionForm/SyncCatalogTable/utils.ts | 24 ++++- 5 files changed, 140 insertions(+), 138 deletions(-) delete mode 100644 airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/HeaderNamespaceCell.tsx create mode 100644 airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/hooks/useNamespaceRowInView.tsx diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.module.scss b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.module.scss index f5ab58ec12b..0e38b4f5b45 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.module.scss +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.module.scss @@ -10,24 +10,13 @@ $row-height: 40px; .table { width: 100%; border-spacing: 0; - max-width: 100%; border-radius: $border-radius; } -.thead { - position: sticky; - top: 0; - z-index: 
z-indices.$tableStickyHeader; -} - -.theadHidden { - display: none; -} - // --------- --------- .th { text-align: left; - width: auto; + height: $row-height; padding: 0 variables.$spacing-lg; white-space: nowrap; background-color: colors.$grey-50; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx index 63de9125638..d5b276f632c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx @@ -6,6 +6,7 @@ import { getCoreRowModel, getGroupedRowModel, getSortedRowModel, + Row, useReactTable, } from "@tanstack/react-table"; import classnames from "classnames"; @@ -35,7 +36,6 @@ import { FieldCursorCell } from "./components/FieldCursorCell"; import { FieldHashMapping } from "./components/FieldHashMapping"; import { FieldPKCell } from "./components/FieldPKCell"; import { FormControls } from "./components/FormControls"; -import { HeaderNamespaceCell } from "./components/HeaderNamespaceCell"; import { NamespaceNameCell } from "./components/NamespaceNameCell"; import { RefreshSchemaControl } from "./components/RefreshSchemaControl"; import { SelectedFieldsCell } from "./components/SelectedFieldsCell"; @@ -48,8 +48,16 @@ import { SyncModeCell } from "./components/SyncModeCell"; import { getExpandedRowModel } from "./getExpandedRowModel"; import { getFilteredRowModel } from "./getFilteredRowModel"; import { useInitialRowIndex } from "./hooks/useInitialRowIndex"; +import { useNamespaceRowInView } from "./hooks/useNamespaceRowInView"; import styles from "./SyncCatalogTable.module.scss"; -import { getRowChangeStatus, getSyncCatalogRows, isNamespaceRow, isStreamRow } from "./utils"; +import { + findRow, + getNamespaceRowId, + getRowChangeStatus, + getSyncCatalogRows, + isNamespaceRow, + 
isStreamRow, +} from "./utils"; import { FormConnectionFormValues, SyncStreamFieldWithId, useInitialFormValues } from "../formConfig"; export interface SyncCatalogUIModel { @@ -119,6 +127,8 @@ export const SyncCatalogTable: FC = () => { const [filtering, setFiltering] = useState(""); const deferredFilteringValue = useDeferredValue(filtering); + const customScrollParent = useContext(ScrollParentContext); + // Update stream const onUpdateStreamConfigWithStreamNode = useCallback( (streamNode: SyncStreamFieldWithId, updatedConfig: Partial) => { @@ -152,15 +162,18 @@ export const SyncCatalogTable: FC = () => { }), columnHelper.accessor("name", { id: "stream.name", - header: () => ( - - ), + header: () => + stickyRow && ( + + ), cell: ({ row, getValue }) => (
{ [initialExpandedState, toggleAllRowsExpanded] ); + const [stickyRowIndex, setStickyRowIndex] = useState(0); + const stickyIndexes = useMemo( + () => + rows.reduce((indexes, row, index) => { + if (row.depth === 0) { + indexes.push(index); + } + return indexes; + }, [] as number[]), + [rows] + ); + + const stickyRow: Row | undefined = useMemo(() => { + if (!rows.length) { + return; + } + + const row = rows[stickyRowIndex]; + // handle index out of bounds in case of collapsing all rows + if (!row) { + return; + } + + if (isNamespaceRow(row)) { + return row; + } + return findRow(rows, getNamespaceRowId(row)); + /** + * adding rows as a dependency will cause a millisecond flicker after toggling expand/collapse all + * we can't remove it since we need to react on any rows change(tab change, filter change, etc) + */ + }, [stickyRowIndex, rows]); + const Table: TableComponents["Table"] = ({ style, ...props }) => ( ); const TableHead: TableComponents["TableHead"] = React.forwardRef(({ style, ...restProps }, ref) => ( - + )); TableHead.displayName = "TableHead"; @@ -396,6 +434,12 @@ export const SyncCatalogTable: FC = () => { const index = props["data-index"]; const row = rows[index]; const { rowChangeStatus } = getRowChangeStatus(row); + const { ref } = useNamespaceRowInView(index, stickyRowIndex, stickyIndexes, setStickyRowIndex, customScrollParent); + + // the first row is the namespace row, we don't need to render it since header has the same content + if (index === 0 && isNamespaceRow(row)) { + return null; + } const rowStatusStyle = classnames(styles.tr, { [styles.added]: rowChangeStatus === "added" && mode !== "create", @@ -406,11 +450,16 @@ export const SyncCatalogTable: FC = () => { }); return ( - + {row.getVisibleCells().map((cell) => { const meta = cell.column.columnDef.meta as ColumnMeta | undefined; return ( - ); @@ -450,8 +499,6 @@ export const SyncCatalogTable: FC = () => { ]); }; - const customScrollParent = useContext(ScrollParentContext); - return ( <> @@ 
-499,7 +546,7 @@ export const SyncCatalogTable: FC = () => { totalCount={rows.length} - style={{ minHeight: 80 }} // namespace row height + stream row height + style={{ minHeight: 120 }} // header namespace row height + 2 stream rows height initialTopMostItemIndex={initialTopMostItemIndex} components={{ Table, @@ -509,6 +556,11 @@ export const SyncCatalogTable: FC = () => { }} fixedHeaderContent={headerContent} fixedItemHeight={40} + atTopStateChange={(atTop) => { + if (atTop && stickyRowIndex !== 0) { + setStickyRowIndex(0); + } + }} increaseViewportBy={50} useWindowScroll customScrollParent={customScrollParent ?? undefined} diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/HeaderNamespaceCell.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/HeaderNamespaceCell.tsx deleted file mode 100644 index de1a6676fc0..00000000000 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/HeaderNamespaceCell.tsx +++ /dev/null @@ -1,100 +0,0 @@ -import set from "lodash/set"; -import React from "react"; -import { useFormContext } from "react-hook-form"; -import { FormattedMessage } from "react-intl"; - -import { Button } from "components/ui/Button"; -import { CheckBox } from "components/ui/CheckBox"; -import { FlexContainer } from "components/ui/Flex"; -import { Icon } from "components/ui/Icon"; -import { Text } from "components/ui/Text"; - -import { AirbyteStreamAndConfiguration, NamespaceDefinitionType } from "core/api/types/AirbyteClient"; -import { useConnectionFormService } from "hooks/services/ConnectionForm/ConnectionFormService"; -import { useModalService } from "hooks/services/Modal"; - -import { DestinationNamespaceFormValues, DestinationNamespaceModal } from "../../../DestinationNamespaceModal"; -import { FormConnectionFormValues } from "../../formConfig"; - -interface HeaderNamespaceCellProps extends Pick { - streams: 
AirbyteStreamAndConfiguration[]; - onStreamsChanged: (streams: AirbyteStreamAndConfiguration[]) => void; - syncCheckboxDisabled?: boolean; -} - -export const HeaderNamespaceCell: React.FC = ({ - streams, - onStreamsChanged, - syncCheckboxDisabled, - namespaceDefinition, - namespaceFormat, -}) => { - const { mode } = useConnectionFormService(); - const { openModal } = useModalService(); - const { setValue } = useFormContext(); - - const destinationNamespaceChange = (value: DestinationNamespaceFormValues) => { - setValue("namespaceDefinition", value.namespaceDefinition, { shouldDirty: true }); - - if (value.namespaceDefinition === NamespaceDefinitionType.customformat) { - setValue("namespaceFormat", value.namespaceFormat); - } - }; - - const onToggleAllStreamsSyncSwitch = ({ target: { checked } }: React.ChangeEvent) => - onStreamsChanged( - streams.map((stream) => - set(stream, "config", { - ...stream.config, - selected: checked, - }) - ) - ); - const isPartOfStreamsSyncEnabled = () => - streams.some((stream) => stream.config?.selected) && - streams.filter((stream) => stream.config?.selected).length !== streams.length; - const areAllStreamsSyncEnabled = () => streams.every((stream) => stream.config?.selected) && streams.length > 0; - - return ( - - - - - - - - - - ); -}; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/hooks/useNamespaceRowInView.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/hooks/useNamespaceRowInView.tsx new file mode 100644 index 00000000000..3a9a861eaf2 --- /dev/null +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/hooks/useNamespaceRowInView.tsx @@ -0,0 +1,39 @@ +import { useEffect } from "react"; +import { useInView } from "react-intersection-observer"; + +export const useNamespaceRowInView = ( + rowIndex: number, + stickyRowIndex: number, + stickyIndexes: number[], + setStickyRowIndex: (stickyRowIndex: number) => void, + customScrollParent: 
HTMLElement | null +) => { + const { ref, inView, entry } = useInView({ + root: customScrollParent, + rootMargin: `-80px 0px -${customScrollParent && customScrollParent.clientHeight - 159}px 0px`, // area of the root container that will trigger the inView event + threshold: 1, + }); + + useEffect(() => { + if (!inView) { + return; + } + + let closestStickyIndex = 0; + + for (const num of stickyIndexes) { + if (num <= rowIndex) { + closestStickyIndex = num; + } else { + break; // No need to check further + } + } + + if (closestStickyIndex === stickyRowIndex) { + return; + } + setStickyRowIndex(closestStickyIndex); + }, [entry, inView, ref, rowIndex, setStickyRowIndex, stickyIndexes, stickyRowIndex]); + + return { ref }; +}; diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/utils.ts b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/utils.ts index 4dddd402b02..ed02767014c 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/utils.ts +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/utils.ts @@ -83,10 +83,32 @@ export const getSyncCatalogRows = ( }), })); }; - +/** + * Check if row is namespace + * @param row + */ export const isNamespaceRow = (row: Row) => row.depth === 0 && row.original.rowType === "namespace"; + +/** + * Check if row is stream + * @param row + */ export const isStreamRow = (row: Row) => row.depth === 1 && row.original.rowType === "stream"; +/** + * Get the root parent id, which is the namespace id + * @param row + */ +export const getNamespaceRowId = (row: Row) => row.id.split(".")[0]; + +/** + * Find row by id + * note: don't use getRow() method from react-table instance, when column filters are applied, it will return row by index not by id + * @param rows + * @param id + */ +export const findRow = (rows: Array>, id: string) => rows.find((row) => row.id === id); + /** * Is filter by stream enabled * @param columnFilters - 
column filters array, for "stream.selected" column the format is: { id: "stream.selected", value: boolean } From 045054a449a992bc32fab92364ce2de8c5e85aa2 Mon Sep 17 00:00:00 2001 From: Jonathan Pearlin Date: Fri, 27 Sep 2024 11:59:25 -0400 Subject: [PATCH 03/36] refactor: cleanup project (#14155) --- .../build.gradle.kts | 74 +++++-------------- .../rollout/worker/ConnectorRolloutWorker.kt | 3 - .../worker/ConnectorRolloutWorkerFactory.kt | 8 +- .../worker/ConnectorRolloutWorkflowImpl.kt | 40 +++++----- 4 files changed, 41 insertions(+), 84 deletions(-) diff --git a/airbyte-connector-rollout-worker/build.gradle.kts b/airbyte-connector-rollout-worker/build.gradle.kts index 7c4de8d564a..a801c4a1744 100644 --- a/airbyte-connector-rollout-worker/build.gradle.kts +++ b/airbyte-connector-rollout-worker/build.gradle.kts @@ -1,76 +1,30 @@ plugins { - id("io.micronaut.application") version "4.4.2" id("io.airbyte.gradle.jvm.app") - id("io.airbyte.gradle.jvm.lib") - id("io.airbyte.gradle.publish") id("io.airbyte.gradle.docker") - application + id("io.airbyte.gradle.publish") } -group = "io.airbyte.connector.rollout.worker" - -java { - sourceCompatibility = JavaVersion.VERSION_21 - targetCompatibility = JavaVersion.VERSION_21 -} +dependencies { + ksp(platform(libs.micronaut.platform)) + ksp(libs.bundles.micronaut.annotation.processor) -repositories { - mavenCentral() -} + implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.micronaut) + implementation(libs.kotlin.logging) + implementation(libs.temporal.sdk) + implementation(libs.airbyte.protocol) -dependencies { implementation(project(mapOf("path" to ":oss:airbyte-commons-temporal"))) - // TODO: remove the deps not being used - compileOnly(libs.lombok) - annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut - - implementation("io.temporal:temporal-sdk:1.25.0") implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-api:server-api")) 
implementation(project(":oss:airbyte-connector-rollout-shared")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) - implementation(libs.airbyte.protocol) -} - -application { - // Default to running ConnectorRolloutWorker - mainClass.set("io.airbyte.connector.rollout.worker.ConnectorRolloutWorkerApplication") -} - -tasks.jar { - manifest { - attributes( - "Main-Class" to "io.airbyte.connector.rollout.worker.ConnectorRolloutWorkerApplication" - ) - } - - archiveBaseName.set("run-connector-rollout-worker") - archiveVersion.set("") // Remove the version from the JAR file name -} - -tasks.withType { - options.encoding = "UTF-8" - options.compilerArgs.addAll(listOf("-Xlint:unchecked", "-Xlint:deprecation")) -} - -micronaut { - runtime("netty") - testRuntime("junit5") - processing { - incremental(true) - annotations("io.airbyte.connector.rollout.worker.*") - } -} - - -tasks.withType { - duplicatesStrategy = DuplicatesStrategy.INCLUDE } airbyte { application { - mainClass.set("io.airbyte.connector.rollout.worker.ConnectorRolloutWorkerApplication") + mainClass = "io.airbyte.connector.rollout.worker.ConnectorRolloutWorkerApplication" defaultJvmArgs = listOf("-XX:+ExitOnOutOfMemoryError", "-XX:MaxRAMPercentage=75.0") localEnvVars.putAll( mapOf( @@ -82,7 +36,13 @@ airbyte { ) } docker { - imageName.set("connector-rollout-worker") + imageName = "connector-rollout-worker" } } +// The DuplicatesStrategy will be required while this module is mixture of kotlin and java _with_ lombok dependencies. +// By default, Gradle runs all annotation processors and disables annotation processing by javac, however. Once lombok has +// been removed, this can also be removed. 
+tasks.withType().configureEach { + duplicatesStrategy = DuplicatesStrategy.EXCLUDE +} diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorker.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorker.kt index c72978c8ee5..70a97af6741 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorker.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorker.kt @@ -7,14 +7,11 @@ package io.airbyte.connector.rollout.worker import io.temporal.worker.WorkerFactory import jakarta.inject.Named import jakarta.inject.Singleton -import org.slf4j.LoggerFactory @Singleton class ConnectorRolloutWorker( @Named("connectorRolloutWorkerFactory") private val workerFactory: WorkerFactory, ) { - private val log = LoggerFactory.getLogger(ConnectorRolloutWorker::class.java) - fun startWorker() { workerFactory.start() } diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkerFactory.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkerFactory.kt index ab2084d9098..210a6ef974d 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkerFactory.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkerFactory.kt @@ -10,18 +10,18 @@ import io.airbyte.connector.rollout.worker.activities.FinalizeRolloutActivityImp import io.airbyte.connector.rollout.worker.activities.FindRolloutActivityImpl import io.airbyte.connector.rollout.worker.activities.GetRolloutActivityImpl import io.airbyte.connector.rollout.worker.activities.StartRolloutActivityImpl +import io.github.oshai.kotlinlogging.KotlinLogging import 
io.micronaut.context.annotation.Factory import io.temporal.client.WorkflowClient import io.temporal.worker.Worker import io.temporal.worker.WorkerFactory import jakarta.inject.Named import jakarta.inject.Singleton -import org.slf4j.LoggerFactory + +private val logger = KotlinLogging.logger {} @Factory class ConnectorRolloutWorkerFactory { - private val log = LoggerFactory.getLogger(ConnectorRolloutWorkerFactory::class.java) - @Singleton @Named("connectorRolloutWorkerFactory") fun connectorRolloutWorkerFactory( @@ -32,7 +32,7 @@ class ConnectorRolloutWorkerFactory { updateRolloutActivityImpl: DoRolloutActivityImpl, finalizeRolloutActivityImpl: FinalizeRolloutActivityImpl, ): WorkerFactory { - log.info("ConnectorRolloutWorkerFactory registering workflow") + logger.info { "ConnectorRolloutWorkerFactory registering workflow" } val workerFactory = WorkerFactory.newInstance(workflowClient) val worker: Worker = workerFactory.newWorker(Constants.TASK_QUEUE) worker.registerWorkflowImplementationTypes(ConnectorRolloutWorkflowImpl::class.java) diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt index 6f064e3bf0c..3b7481c9d07 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/ConnectorRolloutWorkflowImpl.kt @@ -17,16 +17,17 @@ import io.airbyte.connector.rollout.worker.activities.FinalizeRolloutActivity import io.airbyte.connector.rollout.worker.activities.FindRolloutActivity import io.airbyte.connector.rollout.worker.activities.GetRolloutActivity import io.airbyte.connector.rollout.worker.activities.StartRolloutActivity +import io.github.oshai.kotlinlogging.KotlinLogging import io.temporal.activity.ActivityOptions 
import io.temporal.common.RetryOptions import io.temporal.failure.ApplicationFailure import io.temporal.workflow.Workflow -import org.slf4j.LoggerFactory import java.lang.reflect.Field import java.time.Duration +private val logger = KotlinLogging.logger {} + class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { - private val log = LoggerFactory.getLogger(ConnectorRolloutWorkflowImpl::class.java) private val activityOptions = ActivityOptions.newBuilder() .setStartToCloseTimeout(Duration.ofSeconds(10)) @@ -77,7 +78,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { override fun run(input: ConnectorRolloutActivityInputStart): ConnectorEnumRolloutState { val workflowId = "${input.dockerRepository}:${input.dockerImageTag}:${input.actorDefinitionId.toString().substring(0, 8)}" - log.info("Initialized rollout for $workflowId") + logger.info { "Initialized rollout for $workflowId" } Workflow.await { startRolloutFailed || failed || errored || canceled || succeeded } return when { startRolloutFailed -> throw ApplicationFailure.newFailure( @@ -97,11 +98,11 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { } override fun startRollout(input: ConnectorRolloutActivityInputStart): ConnectorRolloutOutput { - log.info("startRollout: calling startRolloutActivity") + logger.info { "startRollout: calling startRolloutActivity" } val workflowRunId = Workflow.getInfo().firstExecutionRunId return try { val output = startRolloutActivity.startRollout(workflowRunId, input) - log.info("startRolloutActivity.startRollout") + logger.info { "startRolloutActivity.startRollout" } output } catch (e: Exception) { startRolloutFailed = true @@ -110,58 +111,58 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { } override fun startRolloutValidator(input: ConnectorRolloutActivityInputStart) { - log.info("startRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}") + logger.info { "startRolloutValidator: 
${input.dockerRepository}:${input.dockerImageTag}" } require(!(input.dockerRepository == null || input.dockerImageTag == null || input.actorDefinitionId == null || input.rolloutId == null)) { "Cannot start rollout; invalid input: ${mapAttributesToString(input)}" } } override fun findRollout(input: ConnectorRolloutActivityInputFind): List { - log.info("getRollout: calling getRolloutActivity") + logger.info { "getRollout: calling getRolloutActivity" } val output = findRolloutActivity.findRollout(input) - log.info("findRolloutActivity.findRollout: $output") + logger.info { "findRolloutActivity.findRollout: $output" } return output } override fun findRolloutValidator(input: ConnectorRolloutActivityInputFind) { - log.info("findRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}") + logger.info { "findRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}" } require(!(input.dockerRepository == null || input.dockerImageTag == null || input.actorDefinitionId == null)) { "Cannot find rollout; invalid input: ${mapAttributesToString(input)}" } } override fun getRollout(input: ConnectorRolloutActivityInputGet): ConnectorRolloutOutput { - log.info("getRollout: calling getRolloutActivity") + logger.info { "getRollout: calling getRolloutActivity" } val output = getRolloutActivity.getRollout(input) - log.info("getRolloutActivity.getRollout pinned_actors = ${output.actorIds}") + logger.info { "getRolloutActivity.getRollout pinned_actors = ${output.actorIds}" } return output } override fun getRolloutValidator(input: ConnectorRolloutActivityInputGet) { - log.info("getRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}") + logger.info { "getRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}" } require(!(input.dockerRepository == null || input.dockerImageTag == null || input.actorDefinitionId == null || input.rolloutId == null)) { "Cannot get rollout; invalid input: ${mapAttributesToString(input)}" } } override fun 
doRollout(input: ConnectorRolloutActivityInputRollout): ConnectorRolloutOutput { - log.info("doRollout: calling doRolloutActivity") + logger.info { "doRollout: calling doRolloutActivity" } val output = doRolloutActivity.doRollout(input) - log.info("doRolloutActivity.doRollout pinned_connections = ${output.actorIds}") + logger.info { "doRolloutActivity.doRollout pinned_connections = ${output.actorIds}" } return output } override fun doRolloutValidator(input: ConnectorRolloutActivityInputRollout) { - log.info("doRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}") + logger.info { "doRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}" } require(!(input.dockerRepository == null || input.dockerImageTag == null || input.actorDefinitionId == null || input.rolloutId == null)) { "Cannot do rollout; invalid input: ${mapAttributesToString(input)}" } } override fun finalizeRollout(input: ConnectorRolloutActivityInputFinalize): ConnectorRolloutOutput { - log.info("finalizeRollout: calling finalizeRolloutActivity") + logger.info { "finalizeRollout: calling finalizeRolloutActivity" } val rolloutResult = finalizeRolloutActivity.finalizeRollout(input) - log.info("finalizeRolloutActivity.finalizeRollout rolloutResult = $rolloutResult") + logger.info { "finalizeRolloutActivity.finalizeRollout rolloutResult = $rolloutResult" } when (input.result) { ConnectorRolloutFinalState.SUCCEEDED -> succeeded = true ConnectorRolloutFinalState.FAILED_ROLLED_BACK -> failed = true @@ -172,7 +173,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { } override fun finalizeRolloutValidator(input: ConnectorRolloutActivityInputFinalize) { - log.info("finalizeRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}") + logger.info { "finalizeRolloutValidator: ${input.dockerRepository}:${input.dockerImageTag}" } require( !( input.dockerRepository == null || @@ -197,8 +198,7 @@ class ConnectorRolloutWorkflowImpl : ConnectorRolloutWorkflow { 
.append(value?.toString() ?: "null") .append(" ") } catch (e: IllegalAccessException) { - LoggerFactory.getLogger(ConnectorRolloutWorkflowImpl::class.java) - .error("Error mapping attributes to string: ${e.message}") + logger.error(e) { "Error mapping attributes to string: ${e.message}" } } } return result.toString().trim() From 9966400f1d611f40ac0f16913612b45b040e9b4b Mon Sep 17 00:00:00 2001 From: Parker Mossman Date: Fri, 27 Sep 2024 09:30:14 -0700 Subject: [PATCH 04/36] feat: add /api/v1/billing/complete_checkout_session and refactor Stripe webhook processing (#14055) --- airbyte-api/server-api/build.gradle.kts | 346 ++++++++++-------- .../server-api/src/main/openapi/config.yaml | 24 ++ .../scheduling/AirbyteTaskExecutors.java | 6 + .../AirbyteHttpRequestFieldExtractor.java | 8 +- .../support/AuthorizationServerHandler.java | 8 +- .../apis/controllers/BillingController.kt | 27 +- .../src/main/resources/application.yml | 3 + 7 files changed, 240 insertions(+), 182 deletions(-) diff --git a/airbyte-api/server-api/build.gradle.kts b/airbyte-api/server-api/build.gradle.kts index 2844040c7e9..81dd511329a 100644 --- a/airbyte-api/server-api/build.gradle.kts +++ b/airbyte-api/server-api/build.gradle.kts @@ -43,7 +43,8 @@ dependencies { val specFile = "$projectDir/src/main/openapi/config.yaml" -val genApiServer = tasks.register("generateApiServer") { +val genApiServer = + tasks.register("generateApiServer") { val serverOutputDir = "${getLayout().buildDirectory.get()}/generated/api/server" inputs.file(specFile) @@ -58,47 +59,51 @@ val genApiServer = tasks.register("generateApiServer") { invokerPackage = "io.airbyte.api.invoker.generated" modelPackage = "io.airbyte.api.model.generated" - schemaMappings = mapOf( - "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationDefinitionSpecification" to 
"com.fasterxml.jackson.databind.JsonNode", - "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", - "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", - "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", - "DeclarativeManifest" to "com.fasterxml.jackson.databind.JsonNode", - "SecretPersistenceConfigurationJson" to "com.fasterxml.jackson.databind.JsonNode", - "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", - ) + schemaMappings = + mapOf( + "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", + "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", + "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", + "DeclarativeManifest" to "com.fasterxml.jackson.databind.JsonNode", + "SecretPersistenceConfigurationJson" to "com.fasterxml.jackson.databind.JsonNode", + "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", + "BillingEvent" to "com.fasterxml.jackson.databind.JsonNode", + ) generateApiDocumentation = false - configOptions = mapOf( - "dateLibrary" to "java8", - "generatePom" to "false", - "interfaceOnly" to "true", + configOptions = + mapOf( + "dateLibrary" to "java8", + "generatePom" to "false", + "interfaceOnly" to "true", /* JAX-RS generator does not respect nullable properties defined in the OpenApi Spec. It means that if a field is not nullable but not set it is still returning a null value for this field in the serialized json. 
The below Jackson annotation is made to only keep non null values in serialized json. We are not yet using nullable=true properties in our OpenApi so this is a valid workaround at the moment to circumvent the default JAX-RS behavior described above. Feel free to read the conversation on https://github.com/airbytehq/airbyte/pull/13370 for more details. - */ - "additionalModelTypeAnnotations" to "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", - - // Generate separate classes for each endpoint "domain" - "useTags" to "true", - "useJakartaEe" to "true", - ) + */ + "additionalModelTypeAnnotations" to + "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", + // Generate separate classes for each endpoint "domain" + "useTags" to "true", + "useJakartaEe" to "true", + ) - doLast { - // Remove unnecessary invoker classes to avoid Micronaut picking them up and registering them as beans - delete("${outputDir.get()}/src/gen/java/${invokerPackage.get().replace(".", "/").replace("-","_")}") + doLast { + // Remove unnecessary invoker classes to avoid Micronaut picking them up and registering them as beans + delete("${outputDir.get()}/src/gen/java/${invokerPackage.get().replace(".", "/").replace("-","_")}") + } } -} -val genApiServer2 = tasks.register("genApiServer2") { +val genApiServer2 = + tasks.register("genApiServer2") { val serverOutputDir = "${getLayout().buildDirectory.get()}/generated/api/server2" inputs.file(specFile) @@ -113,109 +118,121 @@ val genApiServer2 = tasks.register("genApiServer2") { generateApiDocumentation = false - configOptions = mapOf( - "dateLibrary" to "java8", - "enumPropertyNaming" to "UPPERCASE", - "generatePom" to "false", - "interfaceOnly" to "true", - "library" to "jaxrs-spec", - "returnResponse" to "false", - "additionalModelTypeAnnotations" to 
"\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", - "useTags" to "true", - "useJakartaEe" to "true", - ) + configOptions = + mapOf( + "dateLibrary" to "java8", + "enumPropertyNaming" to "UPPERCASE", + "generatePom" to "false", + "interfaceOnly" to "true", + "library" to "jaxrs-spec", + "returnResponse" to "false", + "additionalModelTypeAnnotations" to + "\n@com.fasterxml.jackson.annotation.JsonInclude(com.fasterxml.jackson.annotation.JsonInclude.Include.NON_NULL)", + "useTags" to "true", + "useJakartaEe" to "true", + ) - schemaMappings = mapOf( - "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", - "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", - "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", - "DeclarativeManifest" to "com.fasterxml.jackson.databind.JsonNode", - "SecretPersistenceConfigurationJson" to "com.fasterxml.jackson.databind.JsonNode", - "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", - ) -} + schemaMappings = + mapOf( + "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", + "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", + "FieldSchema" to 
"com.fasterxml.jackson.databind.JsonNode", + "DeclarativeManifest" to "com.fasterxml.jackson.databind.JsonNode", + "SecretPersistenceConfigurationJson" to "com.fasterxml.jackson.databind.JsonNode", + "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", + "BillingEvent" to "com.fasterxml.jackson.databind.JsonNode", + ) + } -val genApiClient = tasks.register("genApiClient") { - val clientOutputDir = "${getLayout().buildDirectory.get()}/generated/api/client" - - inputs.file(specFile) - outputs.dir(clientOutputDir) - - generatorName = "kotlin" - inputSpec = specFile - outputDir = clientOutputDir - - apiPackage = "io.airbyte.api.client.generated" - invokerPackage = "io.airbyte.api.client.invoker.generated" - modelPackage = "io.airbyte.api.client.model.generated" - - schemaMappings = mapOf( - "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", - "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", - "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", - "DeclarativeManifest" to "com.fasterxml.jackson.databind.JsonNode", - "SecretPersistenceConfigurationJson" to "com.fasterxml.jackson.databind.JsonNode", - "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", - ) +val genApiClient = + tasks.register("genApiClient") { + val clientOutputDir = "${getLayout().buildDirectory.get()}/generated/api/client" - generateApiDocumentation = false + inputs.file(specFile) + outputs.dir(clientOutputDir) - configOptions = mapOf( - "enumPropertyNaming" to "UPPERCASE", - "generatePom" to "false", - "interfaceOnly" to "true", - 
"serializationLibrary" to "jackson", - ) + generatorName = "kotlin" + inputSpec = specFile + outputDir = clientOutputDir + + apiPackage = "io.airbyte.api.client.generated" + invokerPackage = "io.airbyte.api.client.invoker.generated" + modelPackage = "io.airbyte.api.client.model.generated" + + schemaMappings = + mapOf( + "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", + "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", + "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", + "DeclarativeManifest" to "com.fasterxml.jackson.databind.JsonNode", + "SecretPersistenceConfigurationJson" to "com.fasterxml.jackson.databind.JsonNode", + "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", + "BillingEvent" to "com.fasterxml.jackson.databind.JsonNode", + ) + + generateApiDocumentation = false + + configOptions = + mapOf( + "enumPropertyNaming" to "UPPERCASE", + "generatePom" to "false", + "interfaceOnly" to "true", + "serializationLibrary" to "jackson", + ) - doLast { - val apiClientPath = "${outputDir.get()}/src/main/kotlin/org/openapitools/client/infrastructure/ApiClient.kt" - updateApiClientWithFailsafe(apiClientPath) - updateDomainClientsWithFailsafe("${outputDir.get()}/src/main/kotlin/io/airbyte/api/client/generated") - configureApiSerializer("${outputDir.get()}/src/main/kotlin/org/openapitools/client/infrastructure/Serializer.kt") + doLast { + val apiClientPath = "${outputDir.get()}/src/main/kotlin/org/openapitools/client/infrastructure/ApiClient.kt" + updateApiClientWithFailsafe(apiClientPath) + 
updateDomainClientsWithFailsafe("${outputDir.get()}/src/main/kotlin/io/airbyte/api/client/generated") + configureApiSerializer("${outputDir.get()}/src/main/kotlin/org/openapitools/client/infrastructure/Serializer.kt") + } } -} -val genApiDocs = tasks.register("generateApiDocs") { - val docsOutputDir = "${getLayout().buildDirectory.get()}/generated/api/docs" - - generatorName = "html" - inputSpec = specFile - outputDir = docsOutputDir - - apiPackage = "io.airbyte.api.client.generated" - invokerPackage = "io.airbyte.api.client.invoker.generated" - modelPackage = "io.airbyte.api.client.model.generated" - - schemaMappings = mapOf( - "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", - "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", - "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", - "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", - "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", - "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", - ) +val genApiDocs = + tasks.register("generateApiDocs") { + val docsOutputDir = "${getLayout().buildDirectory.get()}/generated/api/docs" - generateApiDocumentation = false + generatorName = "html" + inputSpec = specFile + outputDir = docsOutputDir + + apiPackage = "io.airbyte.api.client.generated" + invokerPackage = "io.airbyte.api.client.invoker.generated" + modelPackage = "io.airbyte.api.client.model.generated" + + schemaMappings = + mapOf( + "OAuthConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "SourceDefinitionSpecification" to "com.fasterxml.jackson.databind.JsonNode", + "SourceConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "DestinationDefinitionSpecification" to 
"com.fasterxml.jackson.databind.JsonNode", + "DestinationConfiguration" to "com.fasterxml.jackson.databind.JsonNode", + "StreamJsonSchema" to "com.fasterxml.jackson.databind.JsonNode", + "StateBlob" to "com.fasterxml.jackson.databind.JsonNode", + "FieldSchema" to "com.fasterxml.jackson.databind.JsonNode", + "ConnectorBuilderProjectTestingValues" to "com.fasterxml.jackson.databind.JsonNode", + "BillingEvent" to "com.fasterxml.jackson.databind.JsonNode", + ) - configOptions = mapOf( - "dateLibrary" to "java8", - "generatePom" to "false", - "interfaceOnly" to "true", - ) -} + generateApiDocumentation = false + + configOptions = + mapOf( + "dateLibrary" to "java8", + "generatePom" to "false", + "interfaceOnly" to "true", + ) + } sourceSets { main { @@ -247,7 +264,7 @@ tasks.withType().configureEach { } tasks.named("compileKotlin") { - dependsOn(genApiClient, genApiServer2) + dependsOn(genApiClient, genApiServer2) } // uses afterEvaluate because at configuration time, the kspKotlin task does not exist. @@ -261,7 +278,7 @@ afterEvaluate { // still runs into spotbug issues. Working theory is that // generated code is being picked up. Disable as a short-term fix. tasks.named("spotbugsMain") { - enabled = false + enabled = false } private fun updateApiClientWithFailsafe(clientPath: String) { @@ -269,24 +286,27 @@ private fun updateApiClientWithFailsafe(clientPath: String) { * UPDATE ApiClient.kt to use Failsafe. 
*/ val apiClientFile = file(clientPath) - var apiClientFileText = apiClientFile.readText() - // replace class declaration - .replace( - "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient) {", - "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient, val policy : RetryPolicy = RetryPolicy.ofDefaults()) {") - // replace execute call - .replace("val response = client.newCall(request).execute()", - """val call = client.newCall(request) + var apiClientFileText = + apiClientFile + .readText() + // replace class declaration + .replace( + "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient) {", + "open class ApiClient(val baseUrl: String, val client: OkHttpClient = defaultClient, val policy : RetryPolicy = RetryPolicy.ofDefaults()) {", + ) + // replace execute call + .replace( + "val response = client.newCall(request).execute()", + """val call = client.newCall(request) val failsafeCall = FailsafeCall.with(policy).compose(call) - val response: Response = failsafeCall.execute()""") - + val response: Response = failsafeCall.execute()""", + ) // add imports if not exist if (!apiClientFileText.contains("import dev.failsafe.RetryPolicy")) { val newImports = """import dev.failsafe.RetryPolicy import dev.failsafe.okhttp.FailsafeCall""" apiClientFileText = apiClientFileText.replaceFirst("import ", "$newImports\nimport ") - } apiClientFile.writeText(apiClientFileText) } @@ -301,13 +321,15 @@ private fun updateDomainClientsWithFailsafe(clientPath: String) { var domainClientFileText = domainClient.readText() // replace class declaration - domainClientFileText = domainClientFileText.replace( - "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)".toRegex(), - "class $1(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient, policy : RetryPolicy = 
RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)" - ) + domainClientFileText = + domainClientFileText.replace( + "class (\\S+)\\(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient\\) : ApiClient\\(basePath, client\\)" + .toRegex(), + "class $1(basePath: kotlin.String = defaultBasePath, client: OkHttpClient = ApiClient.defaultClient, policy : RetryPolicy = RetryPolicy.ofDefaults()) : ApiClient(basePath, client, policy)", + ) // add imports if not exist - if(!domainClientFileText.contains("import dev.failsafe.RetryPolicy")) { + if (!domainClientFileText.contains("import dev.failsafe.RetryPolicy")) { val newImports = "import dev.failsafe.RetryPolicy" domainClientFileText = domainClientFileText.replaceFirst("import ", "$newImports\nimport ") } @@ -321,9 +343,11 @@ private fun updateDomainClientsToIncludeHttpResponseBodyOnClientException(client val dir = file(clientPath) dir.walk().forEach { domainClient -> if (domainClient.name.endsWith(".kt")) { - val domainClientFileText = domainClient.readText().replace( - "throw ClientException(\"Client error : \${localVarError.statusCode} \${localVarError.message.orEmpty()}\", localVarError.statusCode, localVarResponse)", - "throw ClientException(\"Client error : \${localVarError.statusCode} \${localVarError.message.orEmpty()} \${localVarError.body ?: \"\"}\", localVarError.statusCode, localVarResponse)") + val domainClientFileText = + domainClient.readText().replace( + "throw ClientException(\"Client error : \${localVarError.statusCode} \${localVarError.message.orEmpty()}\", localVarError.statusCode, localVarResponse)", + "throw ClientException(\"Client error : \${localVarError.statusCode} \${localVarError.message.orEmpty()} \${localVarError.body ?: \"\"}\", localVarError.statusCode, localVarResponse)", + ) domainClient.writeText(domainClientFileText) } @@ -336,17 +360,19 @@ private fun configureApiSerializer(serializerPath: String) { */ val serializerFile = 
file(serializerPath) - val imports = listOf( - "import com.fasterxml.jackson.annotation.JsonInclude", - "import com.fasterxml.jackson.databind.ObjectMapper", - "import com.fasterxml.jackson.databind.DeserializationFeature", - "import com.fasterxml.jackson.databind.SerializationFeature", - "import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule", - "import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper", - "import org.openapitools.jackson.nullable.JsonNullableModule" - ) + val imports = + listOf( + "import com.fasterxml.jackson.annotation.JsonInclude", + "import com.fasterxml.jackson.databind.ObjectMapper", + "import com.fasterxml.jackson.databind.DeserializationFeature", + "import com.fasterxml.jackson.databind.SerializationFeature", + "import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule", + "import com.fasterxml.jackson.module.kotlin.jacksonObjectMapper", + "import org.openapitools.jackson.nullable.JsonNullableModule", + ) - val body = """ + val body = + """ object Serializer { @JvmStatic val jacksonObjectMapper: ObjectMapper = jacksonObjectMapper() @@ -361,13 +387,15 @@ object Serializer { .registerModule(JavaTimeModule()) .registerModule(JsonNullableModule()) } - """.trimIndent() + """.trimIndent() - serializerFile.writeText(""" + serializerFile.writeText( + """ package org.openapitools.client.infrastructure ${imports.joinToString("\n")} $body - """.trimIndent()) -} \ No newline at end of file + """.trimIndent(), + ) +} diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml b/airbyte-api/server-api/src/main/openapi/config.yaml index f51df36d205..756e55ec0b2 100644 --- a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -4584,6 +4584,26 @@ paths: application/json: schema: $ref: "#/components/schemas/CustomerPortalRead" + /v1/billing/complete_checkout_session: + post: + summary: Complete a checkout session + tags: + - billing + - cloud-only + operationId: 
completeCheckoutSession + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/BillingEvent" + responses: + "200": + description: Successful operation + "400": + description: Invalid payload + "401": + description: Invalid signature + /v1/billing/list_invoices: post: summary: Get a list of past invoices of the customer @@ -10655,6 +10675,10 @@ components: properties: url: type: string + BillingEvent: + description: Generic event for billing, mapped to a JsonNode + type: object + additionalProperties: true # required for mapping to JsonNode InvoiceRead: type: object required: diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java index 0205d1bc73e..ce18889c930 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/scheduling/AirbyteTaskExecutors.java @@ -29,4 +29,10 @@ public interface AirbyteTaskExecutors extends TaskExecutors { */ String PUBLIC_API = "public-api"; + /** + * The name of the {@link java.util.concurrent.ExecutorService} used for webhook endpoints that are + * called by external systems. 
+ */ + String WEBHOOK = "webhook"; + } diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java index b4d6f1fcd0c..7b131c75ab1 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AirbyteHttpRequestFieldExtractor.java @@ -40,14 +40,14 @@ public Optional extractId(final JsonNode json, final String idFieldName) final Optional idValue = extract(json, idFieldName); if (idValue.isEmpty()) { - log.debug("No match for field name '{}' in content '{}'.", idFieldName, json); + log.trace("No match for field name '{}' in content '{}'.", idFieldName, json); } else { - log.debug("Found '{}' for field '{}'", idValue, idFieldName); + log.trace("Found '{}' for field '{}'", idValue, idFieldName); return idValue; } } } catch (final RuntimeException e) { - log.debug("Unable to extract ID field '{}' from content '{}'.", idFieldName, json, e); + log.trace("Unable to extract ID field '{}' from content '{}'.", idFieldName, json, e); } return Optional.empty(); @@ -55,7 +55,7 @@ public Optional extractId(final JsonNode json, final String idFieldName) private Optional extract(JsonNode jsonNode, String idFieldName) { if (ARRAY_FIELDS.contains(idFieldName)) { - log.debug("Try to extract list of ids for field {}", idFieldName); + log.trace("Try to extract list of ids for field {}", idFieldName); return Optional.ofNullable(jsonNode.get(idFieldName)) .map(Jsons::serialize) .filter(StringUtils::hasText); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java index 4db81486f48..a3ad53c4bca 100644 --- 
a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/support/AuthorizationServerHandler.java @@ -63,14 +63,14 @@ protected FullHttpRequest updateHeaders(final FullHttpRequest httpRequest) { final String contentAsString = StandardCharsets.UTF_8.decode(httpRequest.content().nioBuffer()).toString(); final JsonNode contentAsJson = airbyteHttpRequestFieldExtractor.contentToJson(contentAsString).orElse(null); for (final AuthenticationId authenticationId : AuthenticationId.values()) { - log.debug("Checking HTTP request '{}' for field '{}'...", contentAsString, authenticationId.getFieldName()); + log.trace("Checking HTTP request '{}' for field '{}'...", contentAsString, authenticationId.getFieldName()); final Optional id = airbyteHttpRequestFieldExtractor.extractId(contentAsJson, authenticationId.getFieldName()); if (id.isPresent()) { - log.debug("Found field '{}' with value '{}' in HTTP request body.", authenticationId.getFieldName(), id.get()); + log.trace("Found field '{}' with value '{}' in HTTP request body.", authenticationId.getFieldName(), id.get()); addHeaderToRequest(authenticationId.getHttpHeader(), id.get(), httpRequest); } else { - log.debug("Field '{}' not found in content.", authenticationId.getFieldName()); + log.trace("Field '{}' not found in content.", authenticationId.getFieldName()); } } @@ -88,7 +88,7 @@ protected FullHttpRequest updateHeaders(final FullHttpRequest httpRequest) { protected void addHeaderToRequest(final String headerName, final Object headerValue, final FullHttpRequest httpRequest) { final HttpHeaders httpHeaders = httpRequest.headers(); if (!httpHeaders.contains(headerName)) { - log.debug("Adding HTTP header '{}' with value '{}' to request...", headerName, headerValue); + log.trace("Adding HTTP header '{}' with value '{}' to request...", headerName, headerValue); httpHeaders.add(headerName, headerValue.toString()); } 
} diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt index 9fcf4c2a532..8dac45f322d 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt @@ -1,5 +1,6 @@ package io.airbyte.server.apis.controllers +import com.fasterxml.jackson.databind.JsonNode import io.airbyte.api.generated.BillingApi import io.airbyte.api.model.generated.CustomerPortalRead import io.airbyte.api.model.generated.CustomerPortalRequestBody @@ -24,43 +25,39 @@ open class BillingController : BillingApi { @ExecuteOn(AirbyteTaskExecutors.IO) override fun getCustomerPortalLink( @Body customerPortalRequestBody: CustomerPortalRequestBody, - ): CustomerPortalRead { - throw ApiNotImplementedInOssProblem() - } + ): CustomerPortalRead = throw ApiNotImplementedInOssProblem() @RequiresIntent(Intent.ManageOrganizationBilling) @Post("/list_invoices") @ExecuteOn(AirbyteTaskExecutors.IO) override fun listPastInvoices( @Body organizationIdRequestBody: OrganizationIdRequestBody, - ): ListInvoicesRead { - throw ApiNotImplementedInOssProblem() - } + ): ListInvoicesRead = throw ApiNotImplementedInOssProblem() @RequiresIntent(Intent.ManageOrganizationBilling) @Post("/payment_information") @ExecuteOn(AirbyteTaskExecutors.IO) override fun getPaymentInformation( @Body organizationIdRequestBody: OrganizationIdRequestBody, - ): PaymentInformationRead { - throw ApiNotImplementedInOssProblem() - } + ): PaymentInformationRead = throw ApiNotImplementedInOssProblem() @RequiresIntent(Intent.ManageOrganizationBilling) @Post("/organization_balance") @ExecuteOn(AirbyteTaskExecutors.IO) override fun getOrganizationBalance( @Body organizationIdRequestBody: OrganizationIdRequestBody, - ): OrganizationBalanceRead { - throw ApiNotImplementedInOssProblem() - } + ): 
OrganizationBalanceRead = throw ApiNotImplementedInOssProblem() + + @Post("/complete_checkout_session") + @ExecuteOn(AirbyteTaskExecutors.WEBHOOK) + override fun completeCheckoutSession( + @Body event: JsonNode, + ): Unit = throw ApiNotImplementedInOssProblem() @RequiresIntent(Intent.ManageOrganizationBilling) @Post("/trial_status") @ExecuteOn(AirbyteTaskExecutors.IO) override fun getOrganizationTrialStatus( @Body organizationIdRequestBody: OrganizationIdRequestBody, - ): OrganizationTrialStatusRead { - throw ApiNotImplementedInOssProblem() - } + ): OrganizationTrialStatusRead = throw ApiNotImplementedInOssProblem() } diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index 733a0a6aaf2..ded8be3a392 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -28,6 +28,9 @@ micronaut: scheduler: type: fixed n-threads: ${SCHEDULER_TASK_EXECUTOR_THREADS:25} + webhook: + type: fixed + n-threads: ${WEBHOOK_TASK_EXECUTOR_THREADS:3} metrics: enabled: ${MICROMETER_METRICS_ENABLED:false} binders: From afbbca9412aa13cb1b0e43deb2067b0019c6bde1 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Fri, 27 Sep 2024 09:47:26 -0700 Subject: [PATCH 05/36] feat: pr description and pre-existing file handling for edit contributions (#14147) --- .../handlers/ConnectorContributionHandler.kt | 46 +++++++++++++------ .../services/GithubContributionService.kt | 6 +-- .../templates/ContributionTemplates.kt | 10 +++- .../utils/BuilderContributionInfo.kt | 1 + .../pull-request-edit.md.peb | 14 ++++++ .../ConnectorContributionHandlerTest.kt | 37 +++++++++++++-- .../templates/ContributionTemplatesTest.kt | 14 +++++- 7 files changed, 103 insertions(+), 25 deletions(-) create mode 100644 airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb diff --git 
a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandler.kt b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandler.kt index 2bb535eaa37..95a97775410 100644 --- a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandler.kt +++ b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandler.kt @@ -54,29 +54,45 @@ class ConnectorContributionHandler( } } - private fun getFilePathToGenerationFunctionMap( + fun getFilesToCommitGenerationMap( contributionInfo: BuilderContributionInfo, githubContributionService: GithubContributionService, ): Map String> { - return mapOf( - githubContributionService.connectorReadmePath to { contributionTemplates.renderContributionReadmeMd(contributionInfo) }, - githubContributionService.connectorManifestPath to { contributionInfo.manifestYaml }, - githubContributionService.connectorMetadataPath to { - contributionTemplates.renderContributionMetadataYaml(contributionInfo, githubContributionService) - }, - githubContributionService.connectorIconPath to { contributionTemplates.renderIconSvg() }, - githubContributionService.connectorAcceptanceTestConfigPath to { contributionTemplates.renderAcceptanceTestConfigYaml(contributionInfo) }, - githubContributionService.connectorDocsPath to { contributionTemplates.renderContributionDocsMd(contributionInfo) }, - ) + // Always generate the manifest and metadata files + val filesToCommit = + mutableMapOf( + githubContributionService.connectorManifestPath to { contributionInfo.manifestYaml }, + githubContributionService.connectorMetadataPath to { + contributionTemplates.renderContributionMetadataYaml(contributionInfo, githubContributionService) + }, + ) + + // Others - generate if not pre-existing + val createIfNotExistsFiles = + listOf( + 
githubContributionService.connectorReadmePath to { contributionTemplates.renderContributionReadmeMd(contributionInfo) }, + githubContributionService.connectorIconPath to { contributionTemplates.renderIconSvg() }, + githubContributionService.connectorAcceptanceTestConfigPath to { contributionTemplates.renderAcceptanceTestConfigYaml(contributionInfo) }, + githubContributionService.connectorDocsPath to { contributionTemplates.renderContributionDocsMd(contributionInfo) }, + ) + + createIfNotExistsFiles.forEach { (filePath, generationFunction) -> + if (!githubContributionService.checkFileExistsOnMain(filePath)) { + filesToCommit[filePath] = generationFunction + } + } + return filesToCommit } private fun getContributionInfo( generateContributionRequestBody: GenerateContributionRequestBody, githubContributionService: GithubContributionService, ): BuilderContributionInfo { + val isEdit = githubContributionService.checkIfConnectorExistsOnMain() val actorDefinitionId = githubContributionService.readConnectorMetadataValue("definitionId") ?: UUID.randomUUID().toString() val authorUsername = githubContributionService.username return BuilderContributionInfo( + isEdit = isEdit, connectorName = generateContributionRequestBody.name, connectorImageName = generateContributionRequestBody.connectorImageName, actorDefinitionId = actorDefinitionId, @@ -99,12 +115,12 @@ class ConnectorContributionHandler( githubContributionService.prepareBranchForContribution() // Commit files to branch - val fileGenerationMap = getFilePathToGenerationFunctionMap(contributionInfo, githubContributionService) - val filesToCommit = fileGenerationMap.mapValues { it.value() } // Calling .value() evaluates the generation functions - githubContributionService.commitFiles("Submission for ${contributionInfo.connectorImageName} from Connector Builder", filesToCommit) + val fileGenerationMap = getFilesToCommitGenerationMap(contributionInfo, githubContributionService) + val filesToCommit = fileGenerationMap.mapValues 
{ it.value.invoke() } + githubContributionService.commitFiles(filesToCommit) // Create / update pull request of branch - val pullRequestDescription = contributionTemplates.renderNewContributionPullRequestDescription(contributionInfo) + val pullRequestDescription = contributionTemplates.renderContributionPullRequestDescription(contributionInfo) val pullRequest = githubContributionService.getOrCreatePullRequest(pullRequestDescription) return GenerateContributionResponse() diff --git a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/services/GithubContributionService.kt b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/services/GithubContributionService.kt index bb730610d08..12e41f55112 100644 --- a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/services/GithubContributionService.kt +++ b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/services/GithubContributionService.kt @@ -229,10 +229,8 @@ class GithubContributionService(var connectorImageName: String, personalAccessTo } } - fun commitFiles( - message: String, - files: Map, - ): GHCommit { + fun commitFiles(files: Map): GHCommit { + val message = "Submission for $connectorImageName from Connector Builder" val branchSha = getBranchSha(contributionBranchName, forkedRepository) val treeBuilder = forkedRepository.createTree().baseTree(branchSha) diff --git a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt index 006426d8518..d1d9a2aa164 100644 --- a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt +++ b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt @@ -159,7 +159,7 @@ class ContributionTemplates 
{ return renderTemplateString("contribution_templates/acceptance-test-config.yml.peb", context) } - fun renderNewContributionPullRequestDescription(contributionInfo: BuilderContributionInfo): String { + fun renderContributionPullRequestDescription(contributionInfo: BuilderContributionInfo): String { val manifestParser = ManifestParser(contributionInfo.manifestYaml) val streams = toTemplateStreams(manifestParser.streams) val specProperties = toTemplateSpecProperties(manifestParser.spec) @@ -171,6 +171,12 @@ class ContributionTemplates { "specProperties" to specProperties, "streams" to streams, ) - return renderTemplateString("contribution_templates/pull-request-new-connector.md.peb", context) + val templatePath = + if (contributionInfo.isEdit) { + "contribution_templates/pull-request-edit.md.peb" + } else { + "contribution_templates/pull-request-new-connector.md.peb" + } + return renderTemplateString(templatePath, context) } } diff --git a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/BuilderContributionInfo.kt b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/BuilderContributionInfo.kt index d53a43222c0..075486bccc6 100644 --- a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/BuilderContributionInfo.kt +++ b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/BuilderContributionInfo.kt @@ -6,6 +6,7 @@ import java.time.LocalDate import java.time.format.DateTimeFormatter data class BuilderContributionInfo( + val isEdit: Boolean, val connectorName: String, val connectorImageName: String, val actorDefinitionId: String, diff --git a/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb b/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb new file mode 100644 index 00000000000..e3216ac6c19 --- /dev/null +++ 
b/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb @@ -0,0 +1,14 @@ +## What + +This PR updates source {{ connectorName }} ({{ connectorImageName }}). + +The contributor provided the following description of the change: + +{{ description }} + +## Reviewer checklist +- [ ] Resolve any merge conflicts and validate file diffs (make sure the PR only includes changes intended by the contributor) +- [ ] After reviewing the changes, run the [`bump-version` Airbyte-CI command](https://github.com/airbytehq/airbyte/blob/master/airbyte-ci/connectors/pipelines/README.md#connectors-bump-version-command) locally to update the version of the connector according to the [versioning guidelines](https://docs.airbyte.io/contributor-guide/versioning-guidelines). Add `breakingChanges` to metadata if necessary. +- [ ] Ensure connector docs are up to date with any changes +- [ ] Run `/format-fix` to resolve any formatting errors +- [ ] Click into the CI workflows that wait for a maintainer to run them, which should trigger CI runs diff --git a/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandlerTest.kt b/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandlerTest.kt index abc979d20a4..a6cc2059618 100644 --- a/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandlerTest.kt +++ b/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/handlers/ConnectorContributionHandlerTest.kt @@ -5,6 +5,7 @@ package io.airbyte.connector_builder.handlers import io.airbyte.connector_builder.api.model.generated.CheckContributionRequestBody import io.airbyte.connector_builder.services.GithubContributionService import io.airbyte.connector_builder.templates.ContributionTemplates +import io.airbyte.connector_builder.utils.BuilderContributionInfo import 
io.mockk.every import io.mockk.mockk import io.mockk.mockkConstructor @@ -31,7 +32,7 @@ class ConnectorContributionHandlerTest { } @Test - fun `returns details of an existing connector if found in target repository`() { + fun `checkContribution returns details of an existing connector if found in target repository`() { every { anyConstructed().checkIfConnectorExistsOnMain() } returns true every { anyConstructed().readConnectorMetadataValue("name") } returns "Test Connector" @@ -43,7 +44,7 @@ class ConnectorContributionHandlerTest { } @Test - fun `returns 'false' for connectorExists if connector not found in target repository`() { + fun `checkContribution returns 'false' for connectorExists if connector not found in target repository`() { every { anyConstructed().checkIfConnectorExistsOnMain() } returns false val response = connectorContributionHandler.checkContribution(requestBodyMock) @@ -54,7 +55,7 @@ class ConnectorContributionHandlerTest { } @Test - fun `throws IllegalArgumentException for invalid connectorImageName`() { + fun `checkContribution throws IllegalArgumentException for invalid connectorImageName`() { val invalidRequestBodyMock = mockk() every { invalidRequestBodyMock.connectorImageName } returns "not-a-valid_image_name" @@ -65,4 +66,34 @@ class ConnectorContributionHandlerTest { assertEquals("not-a-valid_image_name is not a valid image name.", exception.message) } + + @Test + fun `getFilesToCommitGenerationMap gets all files if they don't exist`() { + val contributionInfo = mockk(relaxed = true) + val githubContributionService = mockk(relaxed = true) + every { githubContributionService.connectorManifestPath } returns "manifestPath" + every { githubContributionService.connectorReadmePath } returns "readmePath" + every { githubContributionService.connectorMetadataPath } returns "metadataPath" + every { githubContributionService.connectorIconPath } returns "iconPath" + every { githubContributionService.connectorAcceptanceTestConfigPath } returns 
"acceptanceTestConfigPath" + every { githubContributionService.connectorDocsPath } returns "docsPath" + every { githubContributionService.checkFileExistsOnMain(any()) } returns false + + val filesToCommit = connectorContributionHandler.getFilesToCommitGenerationMap(contributionInfo, githubContributionService) + assertEquals(6, filesToCommit.size) + assertEquals(setOf("manifestPath", "readmePath", "metadataPath", "iconPath", "acceptanceTestConfigPath", "docsPath"), filesToCommit.keys) + } + + @Test + fun `getFilesToCommitGenerationMap gets only manifest and metadata file if all files exist`() { + val contributionInfo = mockk(relaxed = true) + val githubContributionService = mockk(relaxed = true) + every { githubContributionService.connectorManifestPath } returns "manifestPath" + every { githubContributionService.connectorMetadataPath } returns "metadataPath" + every { githubContributionService.checkFileExistsOnMain(any()) } returns true + + val filesToCommit = connectorContributionHandler.getFilesToCommitGenerationMap(contributionInfo, githubContributionService) + assertEquals(2, filesToCommit.size) + assertEquals(setOf("manifestPath", "metadataPath"), filesToCommit.keys) + } } diff --git a/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt b/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt index 85d58dba26e..0b775de0566 100644 --- a/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt +++ b/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt @@ -26,6 +26,7 @@ class ContributionTemplatesTest { val newConnectorContributionInfo = BuilderContributionInfo( + isEdit = false, connectorName = "Test Connector", connectorImageName = "test", actorDefinitionId = "test-uuid", @@ -86,7 +87,7 @@ class 
ContributionTemplatesTest { val contributionTemplates = ContributionTemplates() val jacksonYaml = jacksonSerialize(serialzedYamlContent) val manifestParser = ManifestParser(jacksonYaml) - val prDescription = contributionTemplates.renderNewContributionPullRequestDescription(newConnectorContributionInfo) + val prDescription = contributionTemplates.renderContributionPullRequestDescription(newConnectorContributionInfo) assert(prDescription.contains(newConnectorContributionInfo.connectorName)) assert(prDescription.contains(newConnectorContributionInfo.connectorImageName)) @@ -106,6 +107,17 @@ class ContributionTemplatesTest { } } + @Test + fun `test edit PR description`() { + val editConnectorContributionInfo = newConnectorContributionInfo.copy(isEdit = true) + val contributionTemplates = ContributionTemplates() + val prDescription = contributionTemplates.renderContributionPullRequestDescription(editConnectorContributionInfo) + + assert(prDescription.contains(editConnectorContributionInfo.connectorName)) + assert(prDescription.contains(editConnectorContributionInfo.connectorImageName)) + assert(prDescription.contains(editConnectorContributionInfo.description)) + } + @Test fun `test privateKeyToString with Array`() { val contributionTemplates = ContributionTemplates() From a101ce4b2346a1d91f3203208f83347d3396a555 Mon Sep 17 00:00:00 2001 From: Jimmy Ma Date: Fri, 27 Sep 2024 10:28:13 -0700 Subject: [PATCH 06/36] chore: use same metrics stack as the rest of the app (#14159) --- .../workload/config/TemporalQueueBeanFactory.kt | 2 +- .../workload/handler/WorkloadHandlerImpl.kt | 13 +++++-------- .../workload/handler/WorkloadHandlerImplTest.kt | 14 ++++++-------- 3 files changed, 12 insertions(+), 17 deletions(-) diff --git a/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/config/TemporalQueueBeanFactory.kt b/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/config/TemporalQueueBeanFactory.kt index 7ab9f2cf553..0f6f5fd1117 100644 --- 
a/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/config/TemporalQueueBeanFactory.kt +++ b/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/config/TemporalQueueBeanFactory.kt @@ -25,7 +25,7 @@ import java.time.Duration class TemporalQueueBeanFactory { @Singleton fun createMetricClient(): MetricClient { - MetricClientFactory.initialize(MetricEmittingApps.SERVER) + MetricClientFactory.initialize(MetricEmittingApps.WORKLOAD_API) return MetricClientFactory.getMetricClient() } diff --git a/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/handler/WorkloadHandlerImpl.kt b/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/handler/WorkloadHandlerImpl.kt index abb3977cd7d..9509436dac7 100644 --- a/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/handler/WorkloadHandlerImpl.kt +++ b/airbyte-workload-api-server/src/main/kotlin/io/airbyte/workload/handler/WorkloadHandlerImpl.kt @@ -5,7 +5,6 @@ import io.airbyte.api.client.model.generated.SignalInput import io.airbyte.commons.json.Jsons import io.airbyte.config.WorkloadType import io.airbyte.metrics.lib.MetricAttribute -import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricTags import io.airbyte.metrics.lib.OssMetricsRegistry import io.airbyte.workload.api.domain.Workload @@ -13,6 +12,7 @@ import io.airbyte.workload.api.domain.WorkloadLabel import io.airbyte.workload.errors.ConflictException import io.airbyte.workload.errors.InvalidStatusTransitionException import io.airbyte.workload.errors.NotFoundException +import io.airbyte.workload.metrics.CustomMetricPublisher import io.airbyte.workload.repository.WorkloadRepository import io.airbyte.workload.repository.domain.WorkloadStatus import io.github.oshai.kotlinlogging.KotlinLogging @@ -29,7 +29,7 @@ private val logger = KotlinLogging.logger {} class WorkloadHandlerImpl( private val workloadRepository: WorkloadRepository, private val airbyteApi: AirbyteApiClient, - private val 
metricClient: MetricClient, + private val metricClient: CustomMetricPublisher, ) : WorkloadHandler { companion object { val ACTIVE_STATUSES: List = @@ -333,8 +333,7 @@ class WorkloadHandlerImpl( } catch (e: Exception) { logger.error(e) { "Failed to deserialize signal payload: $signalPayload" } metricClient.count( - OssMetricsRegistry.WORKLOADS_SIGNAL, - 1, + OssMetricsRegistry.WORKLOADS_SIGNAL.metricName, MetricAttribute(MetricTags.STATUS, MetricTags.FAILURE), MetricAttribute(MetricTags.FAILURE_TYPE, "deserialization"), ) @@ -350,16 +349,14 @@ class WorkloadHandlerImpl( ), ) metricClient.count( - OssMetricsRegistry.WORKLOADS_SIGNAL, - 1, + OssMetricsRegistry.WORKLOADS_SIGNAL.metricName, MetricAttribute(MetricTags.WORKLOAD_TYPE, signalInput.workflowType), MetricAttribute(MetricTags.STATUS, MetricTags.SUCCESS), ) } catch (e: Exception) { logger.error(e) { "Failed to send signal for the payload: $signalPayload" } metricClient.count( - OssMetricsRegistry.WORKLOADS_SIGNAL, - 1, + OssMetricsRegistry.WORKLOADS_SIGNAL.metricName, MetricAttribute(MetricTags.WORKLOAD_TYPE, signalInput.workflowType), MetricAttribute(MetricTags.STATUS, MetricTags.FAILURE), MetricAttribute(MetricTags.FAILURE_TYPE, e.message), diff --git a/airbyte-workload-api-server/src/test/kotlin/io/airbyte/workload/handler/WorkloadHandlerImplTest.kt b/airbyte-workload-api-server/src/test/kotlin/io/airbyte/workload/handler/WorkloadHandlerImplTest.kt index d9883fa1342..754dc7cbd20 100644 --- a/airbyte-workload-api-server/src/test/kotlin/io/airbyte/workload/handler/WorkloadHandlerImplTest.kt +++ b/airbyte-workload-api-server/src/test/kotlin/io/airbyte/workload/handler/WorkloadHandlerImplTest.kt @@ -6,7 +6,6 @@ import io.airbyte.api.client.model.generated.SignalInput import io.airbyte.commons.json.Jsons import io.airbyte.config.SignalInput.Companion.SYNC_WORKFLOW import io.airbyte.metrics.lib.MetricAttribute -import io.airbyte.metrics.lib.MetricClient import io.airbyte.metrics.lib.MetricTags import 
io.airbyte.metrics.lib.OssMetricsRegistry import io.airbyte.workload.api.domain.WorkloadLabel @@ -23,6 +22,7 @@ import io.airbyte.workload.handler.WorkloadHandlerImplTest.Fixtures.verifyApi import io.airbyte.workload.handler.WorkloadHandlerImplTest.Fixtures.verifyFailedSignal import io.airbyte.workload.handler.WorkloadHandlerImplTest.Fixtures.workloadHandler import io.airbyte.workload.handler.WorkloadHandlerImplTest.Fixtures.workloadRepository +import io.airbyte.workload.metrics.CustomMetricPublisher import io.airbyte.workload.repository.WorkloadRepository import io.airbyte.workload.repository.domain.Workload import io.airbyte.workload.repository.domain.WorkloadStatus @@ -435,13 +435,12 @@ class WorkloadHandlerImplTest { ) every { workloadRepository.update(any(), ofType(WorkloadStatus::class), eq("test"), eq("test cancel"), null) } just Runs - every { metricClient.count(OssMetricsRegistry.WORKLOADS_SIGNAL, 1, any(), any()) } returns Unit + every { metricClient.count(OssMetricsRegistry.WORKLOADS_SIGNAL.metricName, any(), any()) } returns Unit workloadHandler.cancelWorkload(WORKLOAD_ID, "test", "test cancel") verify { workloadRepository.update(eq(WORKLOAD_ID), eq(WorkloadStatus.CANCELLED), eq("test"), eq("test cancel"), null) } verify { metricClient.count( - OssMetricsRegistry.WORKLOADS_SIGNAL, - 1, + OssMetricsRegistry.WORKLOADS_SIGNAL.metricName, MetricAttribute(MetricTags.STATUS, MetricTags.FAILURE), MetricAttribute(MetricTags.FAILURE_TYPE, "deserialization"), ) @@ -739,7 +738,7 @@ class WorkloadHandlerImplTest { @Test fun `offsetDateTime method should always return current time`() { - val workloadHandlerImpl = WorkloadHandlerImpl(mockk(), mockk(), mockk()) + val workloadHandlerImpl = WorkloadHandlerImpl(mockk(), mockk(), mockk()) val offsetDateTime = workloadHandlerImpl.offsetDateTime() Thread.sleep(10) val offsetDateTimeAfter10Ms = workloadHandlerImpl.offsetDateTime() @@ -748,7 +747,7 @@ class WorkloadHandlerImplTest { object Fixtures { val workloadRepository = 
mockk() - val metricClient: MetricClient = mockk(relaxed = true) + val metricClient: CustomMetricPublisher = mockk(relaxed = true) private val airbyteApi: AirbyteApiClient = mockk() val signalApi: SignalApi = mockk() const val WORKLOAD_ID = "test" @@ -784,8 +783,7 @@ class WorkloadHandlerImplTest { fun verifyFailedSignal() { verify { metricClient.count( - OssMetricsRegistry.WORKLOADS_SIGNAL, - 1, + OssMetricsRegistry.WORKLOADS_SIGNAL.metricName, MetricAttribute(MetricTags.WORKLOAD_TYPE, signalInput.workflowType), MetricAttribute(MetricTags.STATUS, MetricTags.FAILURE), any(), From 2ed01e554d576bd60011583ea988aeac8980f2f0 Mon Sep 17 00:00:00 2001 From: Ryan Br Date: Fri, 27 Sep 2024 10:37:33 -0700 Subject: [PATCH 07/36] fix: pass replication tolerations on worker config to replication pod. (#14161) --- .../config/ContainerConfigBeanFactory.kt | 19 +++++++++++++++++++ .../pods/factories/ReplicationPodFactory.kt | 3 +++ 2 files changed, 22 insertions(+) diff --git a/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt b/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt index 69f47d2e05c..b942251a4f4 100644 --- a/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt +++ b/airbyte-workload-launcher/src/main/kotlin/config/ContainerConfigBeanFactory.kt @@ -192,6 +192,25 @@ class ContainerConfigBeanFactory { .withMemoryRequest(memoryRequest) } + @Singleton + @Named("replicationPodTolerations") + fun replicationPodTolerations( + @Named("replicationWorkerConfigs") workerConfigs: WorkerConfigs, + ): List { + if (workerConfigs.workerKubeTolerations.isNullOrEmpty()) { + return listOf() + } + return workerConfigs.workerKubeTolerations + .map { t -> + TolerationBuilder() + .withKey(t.key) + .withEffect(t.effect) + .withOperator(t.operator) + .withValue(t.value) + .build() + } + } + @Singleton @Named("checkPodTolerations") fun checkPodTolerations( diff --git 
a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt index 7fdbb49f38e..fd332ad6dad 100644 --- a/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt +++ b/airbyte-workload-launcher/src/main/kotlin/pods/factories/ReplicationPodFactory.kt @@ -10,6 +10,7 @@ import io.fabric8.kubernetes.api.model.LocalObjectReference import io.fabric8.kubernetes.api.model.Pod import io.fabric8.kubernetes.api.model.PodBuilder import io.fabric8.kubernetes.api.model.ResourceRequirements +import io.fabric8.kubernetes.api.model.Toleration import io.micronaut.context.annotation.Value import jakarta.inject.Named import jakarta.inject.Singleton @@ -24,6 +25,7 @@ class ReplicationPodFactory( private val workloadSecurityContextProvider: WorkloadSecurityContextProvider, @Value("\${airbyte.worker.job.kube.serviceAccount}") private val serviceAccount: String?, @Named("replicationImagePullSecrets") private val imagePullSecrets: List, + @Named("replicationPodTolerations") private val tolerations: List, ) { fun create( podName: String, @@ -90,6 +92,7 @@ class ReplicationPodFactory( .withImagePullSecrets(imagePullSecrets) .withVolumes(replicationVolumes.allVolumes) .withNodeSelector(nodeSelectors) + .withTolerations(tolerations) .withAutomountServiceAccountToken(false) .withSecurityContext(workloadSecurityContextProvider.defaultPodSecurityContext()) .endSpec() From fb695e5798c303c05df5a314fdc051714199a4f4 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Fri, 27 Sep 2024 10:40:23 -0700 Subject: [PATCH 08/36] chore: default edit in builder feature flag to true (#14158) --- airbyte-webapp/src/hooks/services/Experiment/experiments.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts index 6ac6577edff..c773679c49f 100644 --- 
a/airbyte-webapp/src/hooks/services/Experiment/experiments.ts +++ b/airbyte-webapp/src/hooks/services/Experiment/experiments.ts @@ -46,7 +46,7 @@ export const defaultExperimentValues: Experiments = { "connector.suggestedDestinationConnectors": "", "connector.suggestedSourceConnectors": "", "connectorBuilder.aiAssist.enabled": false, - "connectorBuilder.contributeEditsToMarketplace": false, + "connectorBuilder.contributeEditsToMarketplace": true, "settings.breakingChangeNotifications": false, "settings.downloadDiagnostics": false, "settings.showAdvancedSettings": false, From 57319f7ebc8626ca93b600e6c593e78fd24a705d Mon Sep 17 00:00:00 2001 From: Ryan Br Date: Fri, 27 Sep 2024 11:27:17 -0700 Subject: [PATCH 09/36] fix: discover and check workloads should cancel when their parent workflow is cancelled. (#14108) --- .../workers/sync/WorkloadApiWorker.java | 4 +- .../io/airbyte/workers/sync/WorkloadClient.kt | 55 +++++- .../workers/workload/WorkloadConstants.kt | 1 + .../workers/sync/WorkloadClientTest.kt | 107 ++++++++++++ .../CheckConnectionActivityImpl.java | 9 +- .../catalog/DiscoverCatalogActivityImpl.java | 8 +- .../CheckConnectionActivityTest.java | 163 ------------------ .../connection/CheckConnectionActivityTest.kt | 153 ++++++++++++++++ .../catalog/DiscoverCatalogActivityTest.kt | 64 ++++--- .../DiscoverCatalogHelperActivityTest.kt | 0 10 files changed, 360 insertions(+), 204 deletions(-) create mode 100644 airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/sync/WorkloadClientTest.kt delete mode 100644 airbyte-workers/src/test/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.java create mode 100644 airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt rename airbyte-workers/src/test/{java => kotlin}/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt (68%) rename airbyte-workers/src/test/{java => 
kotlin}/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityTest.kt (100%) diff --git a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java index ea628648be8..37eb7e9c744 100644 --- a/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java +++ b/airbyte-commons-worker/src/main/java/io/airbyte/workers/sync/WorkloadApiWorker.java @@ -34,6 +34,7 @@ import io.airbyte.workers.models.ReplicationActivityInput; import io.airbyte.workers.pod.Metadata; import io.airbyte.workers.workload.JobOutputDocStore; +import io.airbyte.workers.workload.WorkloadConstants; import io.airbyte.workers.workload.WorkloadIdGenerator; import io.airbyte.workers.workload.exception.DocStoreAccessException; import io.airbyte.workload.api.client.WorkloadApiClient; @@ -193,7 +194,8 @@ public void waitForWorkload(final String workloadId) { public void cancelWorkload(final String workloadId) throws IOException { callWithRetry(() -> { - workloadApiClient.getWorkloadApi().workloadCancel(new WorkloadCancelRequest(workloadId, "user requested", "WorkloadApiWorker")); + workloadApiClient.getWorkloadApi().workloadCancel(new WorkloadCancelRequest( + workloadId, WorkloadConstants.WORKLOAD_CANCELLED_BY_USER_REASON, "WorkloadApiWorker")); return true; }); } diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt index 9b3c8e5df4c..7f16bf98cc1 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/sync/WorkloadClient.kt @@ -1,17 +1,22 @@ package io.airbyte.workers.sync +import io.airbyte.commons.temporal.HeartbeatUtils import io.airbyte.config.ConnectorJobOutput import io.airbyte.config.FailureReason import 
io.airbyte.workers.workload.JobOutputDocStore +import io.airbyte.workers.workload.WorkloadConstants.WORKLOAD_CANCELLED_BY_USER_REASON import io.airbyte.workload.api.client.WorkloadApiClient import io.airbyte.workload.api.client.model.generated.Workload +import io.airbyte.workload.api.client.model.generated.WorkloadCancelRequest import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest import io.airbyte.workload.api.client.model.generated.WorkloadStatus import io.github.oshai.kotlinlogging.KotlinLogging import io.micronaut.http.HttpStatus +import io.temporal.activity.ActivityExecutionContext import jakarta.inject.Singleton import org.openapitools.client.infrastructure.ClientException import java.io.IOException +import java.util.concurrent.atomic.AtomicReference import kotlin.time.Duration.Companion.seconds private val logger = KotlinLogging.logger { } @@ -23,6 +28,7 @@ private val logger = KotlinLogging.logger { } @Singleton class WorkloadClient(private val workloadApiClient: WorkloadApiClient, private val jobOutputDocStore: JobOutputDocStore) { companion object { + const val CANCELLATION_SOURCE_STR = "Cancellation callback." 
val TERMINAL_STATUSES = setOf(WorkloadStatus.SUCCESS, WorkloadStatus.FAILURE, WorkloadStatus.CANCELLED) } @@ -49,19 +55,10 @@ class WorkloadClient(private val workloadApiClient: WorkloadApiClient, private v fun waitForWorkload( workloadId: String, pollingFrequencyInSeconds: Int, - ) { - waitForWorkload(workloadId, pollingFrequencyInSeconds) {} - } - - fun waitForWorkload( - workloadId: String, - pollingFrequencyInSeconds: Int, - loopingAction: () -> Unit, ) { try { var workload = workloadApiClient.workloadApi.workloadGet(workloadId) while (!isWorkloadTerminal(workload)) { - loopingAction() Thread.sleep(pollingFrequencyInSeconds.seconds.inWholeMilliseconds) workload = workloadApiClient.workloadApi.workloadGet(workloadId) } @@ -84,6 +81,46 @@ class WorkloadClient(private val workloadApiClient: WorkloadApiClient, private v ) } + /** + * Attempts to cancel the workload and swallows errors if it fails. + */ + fun cancelWorkloadBestEffort(req: WorkloadCancelRequest) { + try { + workloadApiClient.workloadApi.workloadCancel(req) + } catch (e: ClientException) { + when (e.statusCode) { + HttpStatus.GONE.code -> logger.warn { "Workload: ${req.workloadId} already terminal. Cancellation is a no-op." } + HttpStatus.NOT_FOUND.code -> logger.warn { "Workload: ${req.workloadId} not yet created. Cancellation is a no-op." } + else -> logger.error(e) { "Workload: ${req.workloadId} failed to be cancelled due to API error. Status code: ${e.statusCode}" } + } + } catch (e: Exception) { + logger.error(e) { "Workload: ${req.workloadId} failed to be cancelled." } + } + } + + /** + * Creates and waits for workload propagating any Temporal level cancellations (detected by the heartbeats) to the workload. 
+ */ + fun runWorkloadWithCancellationHeartbeat( + createReq: WorkloadCreateRequest, + checkFrequencyInSeconds: Int, + context: ActivityExecutionContext, + ) { + val cancellationCallback = + Runnable { + val failReq = WorkloadCancelRequest(createReq.workloadId, WORKLOAD_CANCELLED_BY_USER_REASON, CANCELLATION_SOURCE_STR) + cancelWorkloadBestEffort(failReq) + } + HeartbeatUtils.withBackgroundHeartbeat( + AtomicReference(cancellationCallback), + { + createWorkload(createReq) + waitForWorkload(createReq.workloadId, checkFrequencyInSeconds) + }, + context, + ) + } + private fun handleMissingConnectorJobOutput( workloadId: String, t: Throwable?, diff --git a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadConstants.kt b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadConstants.kt index daf80fec127..3437a8cd789 100644 --- a/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadConstants.kt +++ b/airbyte-commons-worker/src/main/kotlin/io/airbyte/workers/workload/WorkloadConstants.kt @@ -6,4 +6,5 @@ package io.airbyte.workers.workload object WorkloadConstants { const val WORKER_V2_MICRONAUT_ENV = "worker-v2" + const val WORKLOAD_CANCELLED_BY_USER_REASON = "User requested." 
} diff --git a/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/sync/WorkloadClientTest.kt b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/sync/WorkloadClientTest.kt new file mode 100644 index 00000000000..a2d47ce2b55 --- /dev/null +++ b/airbyte-commons-worker/src/test/kotlin/io/airbyte/workers/sync/WorkloadClientTest.kt @@ -0,0 +1,107 @@ +package io.airbyte.workers.sync + +import io.airbyte.commons.temporal.HeartbeatUtils +import io.airbyte.workers.sync.WorkloadClient.Companion.CANCELLATION_SOURCE_STR +import io.airbyte.workers.workload.JobOutputDocStore +import io.airbyte.workers.workload.WorkloadConstants.WORKLOAD_CANCELLED_BY_USER_REASON +import io.airbyte.workload.api.client.WorkloadApiClient +import io.airbyte.workload.api.client.generated.WorkloadApi +import io.airbyte.workload.api.client.model.generated.WorkloadCancelRequest +import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest +import io.airbyte.workload.api.client.model.generated.WorkloadPriority +import io.airbyte.workload.api.client.model.generated.WorkloadType +import io.mockk.every +import io.mockk.mockk +import io.mockk.mockkStatic +import io.mockk.slot +import io.mockk.spyk +import io.mockk.verify +import io.temporal.activity.ActivityExecutionContext +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import org.junit.jupiter.api.assertDoesNotThrow +import java.util.concurrent.Callable +import java.util.concurrent.atomic.AtomicReference + +class WorkloadClientTest { + private val apiClientWrapper: WorkloadApiClient = mockk() + private val apiClient: WorkloadApi = mockk() + private val jobOutputDocStore: JobOutputDocStore = mockk() + + private lateinit var client: WorkloadClient + + @BeforeEach + fun setup() { + every { apiClientWrapper.workloadApi } returns apiClient + + client = spyk(WorkloadClient(apiClientWrapper, jobOutputDocStore)) + } + + @Test + fun `cancelWorkloadBestEffort attempts to cancel the workflow`() { + val req = 
WorkloadCancelRequest("workloadId", "reason", "source") + + every { apiClient.workloadCancel(req) } returns Unit + + client.cancelWorkloadBestEffort(req) + + verify { apiClient.workloadCancel(req) } + } + + @Test + fun `cancelWorkloadBestEffort swallows exceptions`() { + val req = WorkloadCancelRequest("workloadId", "reason", "source") + + every { apiClient.workloadCancel(req) } throws Exception("bang") + + assertDoesNotThrow { + client.cancelWorkloadBestEffort(req) + } + + verify { apiClient.workloadCancel(req) } + } + + @Test + fun `runWorkloadWithCancellationHeartbeat wraps workload creation and waiting in a heartbeating thread that cancels the workload on failure`() { + val cancellationCallbackSlot = slot>() + val callableSlot = slot>() + + mockkStatic(HeartbeatUtils::class) + every { HeartbeatUtils.withBackgroundHeartbeat(capture(cancellationCallbackSlot), capture(callableSlot), any()) } returns Unit + + val createReq = + WorkloadCreateRequest( + workloadId = "workloadId", + labels = ArrayList(), + workloadInput = "", + logPath = "", + geography = "", + type = WorkloadType.CHECK, + priority = WorkloadPriority.DEFAULT, + ) + val checkFreqSecs = 10 + val executionContext: ActivityExecutionContext = + mockk { + every { heartbeat(null) } returns Unit + } + + client.runWorkloadWithCancellationHeartbeat(createReq, checkFreqSecs, executionContext) + // validate we call the wrapper + verify { HeartbeatUtils.withBackgroundHeartbeat(any(), any>(), executionContext) } + // validate the wrapped cancellation callback executes the code we expect + every { client.cancelWorkloadBestEffort(any()) } returns Unit + + cancellationCallbackSlot.captured.get().run() + val expectedCancellationPayload = WorkloadCancelRequest(createReq.workloadId, WORKLOAD_CANCELLED_BY_USER_REASON, CANCELLATION_SOURCE_STR) + + verify { client.cancelWorkloadBestEffort(expectedCancellationPayload) } + // validate the wrapped 'callable' callback executes the code we expect + every { 
client.createWorkload(createReq) } returns Unit + every { client.waitForWorkload(createReq.workloadId, checkFreqSecs) } returns Unit + + callableSlot.captured.call() + + verify { client.createWorkload(createReq) } + verify { client.waitForWorkload(createReq.workloadId, checkFreqSecs) } + } +} diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java index 260f98064c9..34cbf06a514 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityImpl.java @@ -35,6 +35,8 @@ import io.airbyte.workload.api.client.model.generated.WorkloadLabel; import io.airbyte.workload.api.client.model.generated.WorkloadPriority; import io.airbyte.workload.api.client.model.generated.WorkloadType; +import io.temporal.activity.Activity; +import io.temporal.activity.ActivityExecutionContext; import io.temporal.activity.ActivityOptions; import jakarta.inject.Named; import jakarta.inject.Singleton; @@ -116,11 +118,12 @@ public ConnectorJobOutput runWithWorkload(final CheckConnectionInput input) thro null, null); - workloadClient.createWorkload(workloadCreateRequest); - final int checkFrequencyInSeconds = featureFlagClient.intVariation(WorkloadCheckFrequencyInSeconds.INSTANCE, new Workspace(workspaceId)); - workloadClient.waitForWorkload(workloadId, checkFrequencyInSeconds); + + final ActivityExecutionContext context = Activity.getExecutionContext(); + + workloadClient.runWorkloadWithCancellationHeartbeat(workloadCreateRequest, checkFrequencyInSeconds, context); final var output = workloadClient.getConnectorJobOutput( workloadId, diff --git a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java 
b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java index 285fa462f10..c4a9f5d428e 100644 --- a/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java +++ b/airbyte-workers/src/main/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityImpl.java @@ -109,16 +109,12 @@ public ConnectorJobOutput runWithWorkload(final DiscoverCatalogInput input) thro null, null); - workloadClient.createWorkload(workloadCreateRequest); - final int checkFrequencyInSeconds = featureFlagClient.intVariation(WorkloadCheckFrequencyInSeconds.INSTANCE, new Workspace(workspaceId)); final ActivityExecutionContext context = getActivityContext(); - workloadClient.waitForWorkload(workloadId, checkFrequencyInSeconds, () -> { - context.heartbeat("waiting for workload to complete"); - return null; - }); + + workloadClient.runWorkloadWithCancellationHeartbeat(workloadCreateRequest, checkFrequencyInSeconds, context); return workloadClient.getConnectorJobOutput( workloadId, diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.java b/airbyte-workers/src/test/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.java deleted file mode 100644 index 526363b3a74..00000000000 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.workers.temporal.check.connection; - -import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_LOG_FILENAME; -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.ArgumentMatchers.any; -import static org.mockito.Mockito.doReturn; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.spy; -import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.when; - -import io.airbyte.api.client.AirbyteApiClient; -import io.airbyte.api.client.model.generated.Geography; -import io.airbyte.commons.logging.LogClientManager; -import io.airbyte.config.ActorContext; -import io.airbyte.config.ActorType; -import io.airbyte.config.ConnectorJobOutput; -import io.airbyte.config.StandardCheckConnectionInput; -import io.airbyte.config.StandardCheckConnectionOutput; -import io.airbyte.config.WorkloadPriority; -import io.airbyte.featureflag.ConfigFileClient; -import io.airbyte.featureflag.FeatureFlagClient; -import io.airbyte.metrics.lib.MetricClient; -import io.airbyte.persistence.job.models.IntegrationLauncherConfig; -import io.airbyte.persistence.job.models.JobRunConfig; -import io.airbyte.workers.models.CheckConnectionInput; -import io.airbyte.workers.sync.WorkloadClient; -import io.airbyte.workers.workload.JobOutputDocStore; -import io.airbyte.workers.workload.WorkloadIdGenerator; -import io.airbyte.workload.api.client.WorkloadApiClient; -import io.airbyte.workload.api.client.generated.WorkloadApi; -import io.airbyte.workload.api.client.model.generated.Workload; -import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest; -import io.airbyte.workload.api.client.model.generated.WorkloadStatus; -import io.airbyte.workload.api.client.model.generated.WorkloadType; -import io.temporal.activity.ActivityOptions; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.Optional; -import java.util.UUID; -import org.junit.jupiter.api.BeforeEach; -import 
org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.ArgumentCaptor; -import org.mockito.Captor; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class CheckConnectionActivityTest { - - private final Path workspaceRoot = Path.of("workspace-root"); - private final AirbyteApiClient airbyteApiClient = mock(AirbyteApiClient.class); - private final WorkloadApi workloadApi = mock(WorkloadApi.class); - private final WorkloadApiClient workloadApiClient = mock(WorkloadApiClient.class); - private final WorkloadIdGenerator workloadIdGenerator = mock(WorkloadIdGenerator.class); - private final JobOutputDocStore jobOutputDocStore = mock(JobOutputDocStore.class); - private final FeatureFlagClient featureFlagClient = mock(ConfigFileClient.class); - private final LogClientManager logClientManager = mock(LogClientManager.class); - - private CheckConnectionActivityImpl checkConnectionActivity; - - @Captor - ArgumentCaptor workloadCaptor; - - private static final UUID ACTOR_DEFINITION_ID = UUID.randomUUID(); - private static final Long ATTEMPT_NUMBER = 42L; - private static final int ATTEMPT_NUMBER_AS_INT = Math.toIntExact(ATTEMPT_NUMBER); - private static final UUID CONNECTION_ID = UUID.randomUUID(); - private static final String JOB_ID = "jobId"; - private static final String WORKLOAD_ID = "workloadId"; - private static final UUID WORKSPACE_ID = UUID.randomUUID(); - - @BeforeEach - void init() throws Exception { - checkConnectionActivity = spy(new CheckConnectionActivityImpl( - workspaceRoot, - airbyteApiClient, - featureFlagClient, - new WorkloadClient(workloadApiClient, jobOutputDocStore), - workloadIdGenerator, - mock(MetricClient.class), - mock(ActivityOptions.class), - logClientManager)); - - when(workloadIdGenerator.generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT)) - .thenReturn(WORKLOAD_ID); - 
doReturn(Geography.US).when(checkConnectionActivity).getGeography(Optional.of(CONNECTION_ID), Optional.of(WORKSPACE_ID)); - when(workloadApi.workloadGet(WORKLOAD_ID)) - .thenReturn(getWorkloadWithStatus(WorkloadStatus.RUNNING)) - .thenReturn(getWorkloadWithStatus(WorkloadStatus.SUCCESS)); - when(workloadApiClient.getWorkloadApi()).thenReturn(workloadApi); - when(logClientManager.fullLogPath(any())).then(i -> Path.of(i.getArguments()[0].toString(), DEFAULT_LOG_FILENAME).toString()); - } - - @Test - void testStartWithWorkload() throws Exception { - final CheckConnectionInput input = getCheckInput(); - - when(jobOutputDocStore.read(WORKLOAD_ID)).thenReturn(Optional.of(new ConnectorJobOutput() - .withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) - .withCheckConnection(new StandardCheckConnectionOutput() - .withStatus(StandardCheckConnectionOutput.Status.SUCCEEDED)))); - - final ConnectorJobOutput output = checkConnectionActivity.runWithWorkload(input); - verify(workloadIdGenerator).generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT); - verify(workloadApi).workloadCreate(workloadCaptor.capture()); - assertEquals(WORKLOAD_ID, workloadCaptor.getValue().getWorkloadId()); - assertEquals(WorkloadType.CHECK, workloadCaptor.getValue().getType()); - assertEquals(ConnectorJobOutput.OutputType.CHECK_CONNECTION, output.getOutputType()); - assertEquals(StandardCheckConnectionOutput.Status.SUCCEEDED, output.getCheckConnection().getStatus()); - } - - @Test - void testStartWithWorkloadMissingOutput() throws Exception { - final CheckConnectionInput input = getCheckInput(); - - when(jobOutputDocStore.read(WORKLOAD_ID)).thenReturn(Optional.empty()); - - final ConnectorJobOutput output = checkConnectionActivity.runWithWorkload(input); - verify(workloadIdGenerator).generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT); - verify(workloadApi).workloadCreate(workloadCaptor.capture()); - assertEquals(WORKLOAD_ID, 
workloadCaptor.getValue().getWorkloadId()); - assertEquals(WorkloadType.CHECK, workloadCaptor.getValue().getType()); - assertEquals(ConnectorJobOutput.OutputType.CHECK_CONNECTION, output.getOutputType()); - assertEquals(StandardCheckConnectionOutput.Status.FAILED, output.getCheckConnection().getStatus()); - } - - private CheckConnectionInput getCheckInput() { - final CheckConnectionInput input = new CheckConnectionInput(); - input.setJobRunConfig(new JobRunConfig().withJobId(JOB_ID).withAttemptId(ATTEMPT_NUMBER)); - input.setCheckConnectionInput(new StandardCheckConnectionInput() - .withActorType(ActorType.SOURCE) - .withActorContext( - new ActorContext().withActorDefinitionId(ACTOR_DEFINITION_ID) - .withWorkspaceId(WORKSPACE_ID))); - input.setLauncherConfig(new IntegrationLauncherConfig().withConnectionId(CONNECTION_ID).withPriority(WorkloadPriority.DEFAULT)); - - return input; - } - - private Workload getWorkloadWithStatus(WorkloadStatus status) { - return new Workload( - "", - new ArrayList<>(), - "", - "", - "", - WorkloadType.CHECK, - UUID.randomUUID(), - null, - status, - null, - null, - null, - null, - ""); - } - -} diff --git a/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt new file mode 100644 index 00000000000..fba741337f9 --- /dev/null +++ b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/check/connection/CheckConnectionActivityTest.kt @@ -0,0 +1,153 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ +package io.airbyte.workers.temporal.check.connection + +import io.airbyte.api.client.AirbyteApiClient +import io.airbyte.api.client.model.generated.Geography +import io.airbyte.commons.logging.DEFAULT_LOG_FILENAME +import io.airbyte.commons.logging.LogClientManager +import io.airbyte.config.ActorContext +import io.airbyte.config.ActorType +import io.airbyte.config.ConnectorJobOutput +import io.airbyte.config.StandardCheckConnectionInput +import io.airbyte.config.StandardCheckConnectionOutput +import io.airbyte.config.WorkloadPriority +import io.airbyte.featureflag.TestClient +import io.airbyte.featureflag.WorkloadCheckFrequencyInSeconds +import io.airbyte.persistence.job.models.IntegrationLauncherConfig +import io.airbyte.persistence.job.models.JobRunConfig +import io.airbyte.workers.models.CheckConnectionInput +import io.airbyte.workers.sync.WorkloadClient +import io.airbyte.workers.workload.WorkloadIdGenerator +import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest +import io.airbyte.workload.api.client.model.generated.WorkloadType +import io.mockk.CapturingSlot +import io.mockk.every +import io.mockk.mockk +import io.mockk.mockkStatic +import io.mockk.slot +import io.mockk.spyk +import io.mockk.verify +import io.temporal.activity.Activity +import io.temporal.activity.ActivityExecutionContext +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.nio.file.Path +import java.util.Optional +import java.util.UUID + +class CheckConnectionActivityTest { + private val workspaceRoot = Path.of("workspace-root") + private val airbyteApiClient: AirbyteApiClient = mockk() + private val workloadClient: WorkloadClient = mockk() + private val workloadIdGenerator: WorkloadIdGenerator = mockk() + private val featureFlagClient: TestClient = mockk() + private val logClientManager: LogClientManager = mockk() + private val executionContext: ActivityExecutionContext = + 
mockk { + every { heartbeat(null) } returns Unit + } + private lateinit var createReqSlot: CapturingSlot + private lateinit var checkConnectionActivity: CheckConnectionActivityImpl + + @BeforeEach + fun init() { + checkConnectionActivity = + spyk( + CheckConnectionActivityImpl( + workspaceRoot, + airbyteApiClient, + featureFlagClient, + workloadClient, + workloadIdGenerator, + mockk(relaxed = true), + mockk(), + logClientManager, + ), + ) + + every { featureFlagClient.intVariation(WorkloadCheckFrequencyInSeconds, any()) } returns WORKLOAD_CHECK_FREQUENCY_IN_SECONDS + every { workloadIdGenerator.generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT) } returns WORKLOAD_ID + every { checkConnectionActivity.getGeography(Optional.of(CONNECTION_ID), Optional.of(WORKSPACE_ID)) } returns Geography.US + every { logClientManager.fullLogPath(any()) } answers { Path.of(invocation.args[0].toString(), DEFAULT_LOG_FILENAME).toString() } + + mockkStatic(Activity::class) + + every { Activity.getExecutionContext() } returns executionContext + + createReqSlot = slot() + every { + workloadClient.runWorkloadWithCancellationHeartbeat( + capture(createReqSlot), + WORKLOAD_CHECK_FREQUENCY_IN_SECONDS, + executionContext, + ) + } returns Unit + } + + @Test + fun `runWithWorkload happy path`() { + val input = checkInput + every { workloadClient.getConnectorJobOutput(WORKLOAD_ID, any()) } returns + ConnectorJobOutput() + .withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) + .withCheckConnection( + StandardCheckConnectionOutput() + .withStatus(StandardCheckConnectionOutput.Status.SUCCEEDED), + ) + + val output = checkConnectionActivity.runWithWorkload(input) + verify { workloadIdGenerator.generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT) } + verify { workloadClient.runWorkloadWithCancellationHeartbeat(any(), WORKLOAD_CHECK_FREQUENCY_IN_SECONDS, executionContext) } + assertEquals(WORKLOAD_ID, createReqSlot.captured.workloadId) + 
assertEquals(WorkloadType.CHECK, createReqSlot.captured.type) + assertEquals(ConnectorJobOutput.OutputType.CHECK_CONNECTION, output.outputType) + assertEquals(StandardCheckConnectionOutput.Status.SUCCEEDED, output.checkConnection.status) + } + + @Test + fun `runWithWorkload missing output`() { + val input = checkInput + every { workloadClient.getConnectorJobOutput(WORKLOAD_ID, any()) } returns + ConnectorJobOutput().withOutputType(ConnectorJobOutput.OutputType.CHECK_CONNECTION) + .withCheckConnection( + StandardCheckConnectionOutput() + .withStatus(StandardCheckConnectionOutput.Status.FAILED) + .withMessage("missing output"), + ) + val output = checkConnectionActivity.runWithWorkload(input) + verify { workloadIdGenerator.generateCheckWorkloadId(ACTOR_DEFINITION_ID, JOB_ID, ATTEMPT_NUMBER_AS_INT) } + assertEquals(WORKLOAD_ID, createReqSlot.captured.workloadId) + assertEquals(WorkloadType.CHECK, createReqSlot.captured.type) + assertEquals(ConnectorJobOutput.OutputType.CHECK_CONNECTION, output.outputType) + assertEquals(StandardCheckConnectionOutput.Status.FAILED, output.checkConnection.status) + } + + companion object { + private val ACTOR_DEFINITION_ID = UUID.randomUUID() + private const val ATTEMPT_NUMBER = 42L + private val ATTEMPT_NUMBER_AS_INT = Math.toIntExact(ATTEMPT_NUMBER) + private val CONNECTION_ID = UUID.randomUUID() + private const val JOB_ID = "jobId" + private const val WORKLOAD_ID = "workloadId" + private val WORKSPACE_ID = UUID.randomUUID() + private const val WORKLOAD_CHECK_FREQUENCY_IN_SECONDS = 10 + + private val checkInput: CheckConnectionInput + get() { + val input = CheckConnectionInput() + input.jobRunConfig = JobRunConfig().withJobId(JOB_ID).withAttemptId(ATTEMPT_NUMBER) + input.checkConnectionInput = + StandardCheckConnectionInput() + .withActorType(ActorType.SOURCE) + .withActorContext( + ActorContext().withActorDefinitionId(ACTOR_DEFINITION_ID) + .withWorkspaceId(WORKSPACE_ID), + ) + input.launcherConfig = 
IntegrationLauncherConfig().withConnectionId(CONNECTION_ID).withPriority(WorkloadPriority.DEFAULT) + return input + } + } +} diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt similarity index 68% rename from airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt rename to airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt index fb8a4fc12c6..57c5d6a840b 100644 --- a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt +++ b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogActivityTest.kt @@ -14,21 +14,22 @@ import io.airbyte.config.StandardDiscoverCatalogInput import io.airbyte.config.WorkloadPriority import io.airbyte.featureflag.FeatureFlagClient import io.airbyte.featureflag.TestClient +import io.airbyte.featureflag.WorkloadCheckFrequencyInSeconds import io.airbyte.persistence.job.models.IntegrationLauncherConfig import io.airbyte.persistence.job.models.JobRunConfig import io.airbyte.workers.models.DiscoverCatalogInput import io.airbyte.workers.sync.WorkloadClient import io.airbyte.workers.temporal.discover.catalog.DiscoverCatalogActivityImpl.DISCOVER_CATALOG_SNAP_DURATION -import io.airbyte.workers.workload.JobOutputDocStore import io.airbyte.workers.workload.WorkloadIdGenerator -import io.airbyte.workload.api.client.WorkloadApiClient -import io.airbyte.workload.api.client.generated.WorkloadApi -import io.airbyte.workload.api.client.model.generated.Workload -import io.airbyte.workload.api.client.model.generated.WorkloadStatus +import io.airbyte.workload.api.client.model.generated.WorkloadCreateRequest import io.airbyte.workload.api.client.model.generated.WorkloadType +import 
io.mockk.CapturingSlot import io.mockk.every import io.mockk.mockk +import io.mockk.slot import io.mockk.spyk +import io.mockk.verify +import io.temporal.activity.ActivityExecutionContext import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.BeforeEach import org.junit.jupiter.params.ParameterizedTest @@ -41,17 +42,19 @@ class DiscoverCatalogActivityTest { private val workspaceRoot: Path = Path.of("workspace-root") private val airbyteApiClient: AirbyteApiClient = mockk() private val featureFlagClient: FeatureFlagClient = spyk(TestClient()) - private val workloadApi: WorkloadApi = mockk() private val connectionApi: ConnectionApi = mockk() - private val workloadApiClient: WorkloadApiClient = mockk() + private val workloadClient: WorkloadClient = mockk() private val workloadIdGenerator: WorkloadIdGenerator = mockk() - private val jobOutputDocStore: JobOutputDocStore = mockk() private val logClientManager: LogClientManager = mockk() + private val executionContext: ActivityExecutionContext = + mockk { + every { heartbeat(null) } returns Unit + } + private lateinit var createReqSlot: CapturingSlot private lateinit var discoverCatalogActivity: DiscoverCatalogActivityImpl @BeforeEach fun init() { - every { workloadApiClient.workloadApi }.returns(workloadApi) every { airbyteApiClient.connectionApi }.returns(connectionApi) discoverCatalogActivity = spyk( @@ -59,18 +62,28 @@ class DiscoverCatalogActivityTest { workspaceRoot, airbyteApiClient, featureFlagClient, - WorkloadClient(workloadApiClient, jobOutputDocStore), + workloadClient, workloadIdGenerator, logClientManager, ), ) - every { discoverCatalogActivity.activityContext } returns mockk() + every { discoverCatalogActivity.activityContext } returns executionContext every { logClientManager.fullLogPath(any()) } answers { Path.of(invocation.args[0].toString(), DEFAULT_LOG_FILENAME).toString() } + every { featureFlagClient.intVariation(WorkloadCheckFrequencyInSeconds, any()) } returns 
WORKLOAD_CHECK_FREQUENCY_IN_SECONDS + + createReqSlot = slot() + every { + workloadClient.runWorkloadWithCancellationHeartbeat( + capture(createReqSlot), + WORKLOAD_CHECK_FREQUENCY_IN_SECONDS, + executionContext, + ) + } returns Unit } @ParameterizedTest - @ValueSource(booleans = [ true, false ]) - fun runWithWorkload(withNewWorkloadName: Boolean) { + @ValueSource(booleans = [true, false]) + fun runWithWorkload(runAsPartOfSync: Boolean) { val jobId = "123" val attemptNumber = 456 val actorDefinitionId = UUID.randomUUID() @@ -91,27 +104,34 @@ class DiscoverCatalogActivityTest { .withActorDefinitionId(actorDefinitionId) .withActorId(actorId), ) - .withManual(!withNewWorkloadName) + .withManual(!runAsPartOfSync) input.launcherConfig = IntegrationLauncherConfig().withConnectionId( connectionId, ).withWorkspaceId(workspaceId).withPriority(WorkloadPriority.DEFAULT) - if (withNewWorkloadName) { + + if (runAsPartOfSync) { every { workloadIdGenerator.generateDiscoverWorkloadIdV2WithSnap(eq(actorId), any(), eq(DISCOVER_CATALOG_SNAP_DURATION)) }.returns(workloadId) } else { every { workloadIdGenerator.generateDiscoverWorkloadId(actorDefinitionId, jobId, attemptNumber) }.returns(workloadId) } every { discoverCatalogActivity.getGeography(Optional.of(connectionId), Optional.of(workspaceId)) }.returns(Geography.AUTO) - every { workloadApi.workloadCreate(any()) }.returns(Unit) - every { - workloadApi.workloadGet(workloadId) - }.returns(Workload(workloadId, listOf(), "", "", "auto", WorkloadType.DISCOVER, UUID.randomUUID(), status = WorkloadStatus.SUCCESS)) val output = ConnectorJobOutput().withOutputType(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID) .withDiscoverCatalogId(UUID.randomUUID()) - every { jobOutputDocStore.read(workloadId) }.returns(Optional.of(output)) - val actualOutput = discoverCatalogActivity.runWithWorkload(input) - assertEquals(output, actualOutput) + every { workloadClient.getConnectorJobOutput(workloadId, any()) } returns output + + val result = 
discoverCatalogActivity.runWithWorkload(input) + + verify { workloadClient.runWorkloadWithCancellationHeartbeat(any(), WORKLOAD_CHECK_FREQUENCY_IN_SECONDS, executionContext) } + assertEquals(workloadId, createReqSlot.captured.workloadId) + assertEquals(WorkloadType.DISCOVER, createReqSlot.captured.type) + assertEquals(ConnectorJobOutput.OutputType.DISCOVER_CATALOG_ID, output.outputType) + assertEquals(output, result) + } + + companion object { + private const val WORKLOAD_CHECK_FREQUENCY_IN_SECONDS = 10 } } diff --git a/airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityTest.kt b/airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityTest.kt similarity index 100% rename from airbyte-workers/src/test/java/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityTest.kt rename to airbyte-workers/src/test/kotlin/io/airbyte/workers/temporal/discover/catalog/DiscoverCatalogHelperActivityTest.kt From b3ca65ef8a2a08fbda5a71d909a8b7a853139766 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Fri, 27 Sep 2024 16:21:43 -0400 Subject: [PATCH 10/36] refactor: connector rollout api endpoint updates (#14145) --- .../server-api/src/main/openapi/config.yaml | 30 +++++++++++++------ .../handlers/ConnectorRolloutHandler.kt | 8 +++++ .../handlers/ConnectorRolloutHandlerTest.kt | 15 +++++----- .../rollout/client/ConnectorRolloutCLI.kt | 6 ++++ .../ConnectorRolloutActivityInputFinalize.kt | 1 + .../ConnectorRolloutActivityInputRollout.kt | 1 + .../ConnectorRolloutActivityInputStart.kt | 1 + .../activities/DoRolloutActivityImpl.kt | 1 + .../activities/FinalizeRolloutActivityImpl.kt | 1 + .../activities/StartRolloutActivityImpl.kt | 1 + .../apis/ConnectorRolloutApiController.java | 3 +- .../src/main/resources/application.yml | 4 +-- 12 files changed, 52 insertions(+), 20 deletions(-) diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml 
b/airbyte-api/server-api/src/main/openapi/config.yaml index 756e55ec0b2..e15410859c1 100644 --- a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -5056,12 +5056,6 @@ paths: # Connector Rollouts /v1/connector_rollout/list_all: post: - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/ConnectorRolloutListAllRequestBody" - responses: "200": content: @@ -7368,9 +7362,6 @@ components: type: string format: uuid - ConnectorRolloutListAllRequestBody: - type: object - ConnectorRolloutListByActorDefinitionIdRequestBody: type: object required: @@ -7461,6 +7452,9 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + updated_by: + type: string + format: uuid ConnectorRolloutStartResponse: type: object @@ -7486,6 +7480,9 @@ components: $ref: "#/components/schemas/ActorId" rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + updated_by: + type: string + format: uuid ConnectorRolloutResponse: type: object @@ -7513,6 +7510,9 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + updated_by: + type: string + format: uuid ConnectorRolloutFinalizeResponse: type: object @@ -7528,6 +7528,7 @@ components: - docker_repository - docker_image_tag - actor_definition_id + - updated_by properties: docker_repository: type: string @@ -7536,6 +7537,9 @@ components: actor_definition_id: type: string format: uuid + updated_by: + type: string + format: uuid ConnectorRolloutManualRolloutRequestBody: type: object @@ -7544,6 +7548,7 @@ components: - docker_image_tag - actor_definition_id - id + - updated_by - actor_ids properties: docker_repository: @@ -7556,6 +7561,9 @@ components: id: type: string format: uuid + updated_by: + type: string + format: uuid actor_ids: type: array items: @@ -7568,6 +7576,7 @@ components: - docker_image_tag - actor_definition_id - id + - updated_by - state 
properties: docker_repository: @@ -7580,6 +7589,9 @@ components: id: type: string format: uuid + updated_by: + type: string + format: uuid state: $ref: "#/components/schemas/ConnectorRolloutStateTerminal" error_msg: diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt index b2948843970..98a2d76c2b3 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt @@ -70,6 +70,11 @@ open class ConnectorRolloutHandler .hasBreakingChanges(connectorRollout.hasBreakingChanges) .rolloutStrategy(rolloutStrategy) .maxStepWaitTimeMins(connectorRollout.maxStepWaitTimeMins?.toInt()) + .updatedAt(connectorRollout.updatedAt?.let { unixTimestampToOffsetDateTime(it) }) + .createdAt(connectorRollout.createdAt?.let { unixTimestampToOffsetDateTime(it) }) + .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) + .errorMsg(connectorRollout.errorMsg) + .failedReason(connectorRollout.failedReason) .updatedBy( connectorRollout.rolloutStrategy?.let { strategy -> connectorRollout.updatedBy?.let { updatedBy -> @@ -150,6 +155,7 @@ open class ConnectorRolloutHandler dockerRepository: String, actorDefinitionId: UUID, dockerImageTag: String, + updatedBy: UUID, ): ConnectorRollout { val actorDefinitionVersion = actorDefinitionService.getActorDefinitionVersion( @@ -185,6 +191,7 @@ open class ConnectorRolloutHandler .withActorDefinitionId(actorDefinitionId) .withReleaseCandidateVersionId(actorDefinitionVersion.get().versionId) .withInitialVersionId(initialVersion.get().versionId) + .withUpdatedBy(updatedBy) .withState(ConnectorEnumRolloutState.INITIALIZED) .withHasBreakingChanges(false) connectorRolloutService.writeConnectorRollout(connectorRollout) @@ 
-358,6 +365,7 @@ open class ConnectorRolloutHandler connectorRolloutWorkflowStart.dockerRepository, connectorRolloutWorkflowStart.actorDefinitionId, connectorRolloutWorkflowStart.dockerImageTag, + connectorRolloutWorkflowStart.updatedBy, ) connectorRolloutClient.startWorkflow( ConnectorRolloutActivityInputStart( diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt index b2598992674..338e62ce606 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt @@ -85,6 +85,7 @@ internal class ConnectorRolloutHandlerTest { val DOCKER_IMAGE_TAG = "0.1" val ACTOR_DEFINITION_ID = UUID.randomUUID() val RELEASE_CANDIDATE_VERSION_ID = UUID.randomUUID() + val UPDATED_BY = UUID.randomUUID() @JvmStatic fun validInsertStates() = listOf(ConnectorEnumRolloutState.CANCELED_ROLLED_BACK) @@ -607,6 +608,7 @@ internal class ConnectorRolloutHandlerTest { dockerRepository = DOCKER_REPOSITORY dockerImageTag = DOCKER_IMAGE_TAG actorDefinitionId = ACTOR_DEFINITION_ID + updatedBy = UPDATED_BY } val connectorRollout = createMockConnectorRollout(rolloutId) @@ -693,7 +695,6 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput updates rollout when already exists in INITIALIZED state`() { val rolloutId = UUID.randomUUID() - val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() @@ -709,7 +710,7 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(any()) } returns Optional.of(createMockActorDefinitionVersion()) - val result = 
connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, actorDefinitionId, dockerImageTag) + val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, actorDefinitionId, dockerImageTag, UPDATED_BY) assertEquals(connectorRollout.id, result.id) verifyAll { @@ -722,7 +723,6 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput throws when initial version is not found`() { val rolloutId = UUID.randomUUID() - val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() @@ -740,9 +740,10 @@ internal class ConnectorRolloutHandlerTest { assertThrows { connectorRolloutHandler.getOrCreateAndValidateManualStartInput( - dockerRepository, + DOCKER_REPOSITORY, actorDefinitionId, dockerImageTag, + UPDATED_BY, ) } } @@ -762,7 +763,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(ACTOR_DEFINITION_ID) } returns Optional.of(actorDefinitionVersion) every { connectorRolloutService.writeConnectorRollout(any()) } returns connectorRollout - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) verifyAll { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) @@ -777,7 +778,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.empty() assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) + 
connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) } verify { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } @@ -797,7 +798,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.of(actorDefinitionVersion) assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) } verifyAll { diff --git a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt index 7ab12f65342..8806170e8f2 100644 --- a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt +++ b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt @@ -111,6 +111,7 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, + UUID(0, 0), ) startWorkflow(rolloutClient, startInput) } @@ -132,6 +133,7 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, + UUID(0, 0), rolloutId!!, actorIds!!, ) @@ -144,6 +146,7 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, + UUID(0, 0), ConnectorRolloutStateTerminal.valueOf(ConnectorRolloutFinalState.SUCCEEDED.toString()), null, null, @@ -157,6 +160,7 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, + UUID(0, 0), ConnectorRolloutStateTerminal.FAILED_ROLLED_BACK, null, null, @@ -170,6 +174,7 @@ class ConnectorRolloutCLI : 
Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, + UUID(0, 0), ConnectorRolloutStateTerminal.CANCELED_ROLLED_BACK, null, null, @@ -187,6 +192,7 @@ class ConnectorRolloutCLI : Runnable { client: ConnectorRolloutApi, input: ConnectorRolloutManualStartRequestBody, ) { + logFormatted("CLI.startWorkflow using client", client) logFormatted("CLI.startWorkflow with input", input) logFormatted("CLI Rollout workflows status", client.manualStartConnectorRollout(input)) } diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt index 9cea8ec7dcf..2e59ea2c3b7 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt @@ -11,4 +11,5 @@ data class ConnectorRolloutActivityInputFinalize( var result: ConnectorRolloutFinalState, var errorMsg: String? = null, var failedReason: String? = null, + var updatedBy: UUID? 
= null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt index 4d18042bb85..2b928988d05 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt @@ -8,4 +8,5 @@ data class ConnectorRolloutActivityInputRollout( var actorDefinitionId: UUID, var rolloutId: UUID, var actorIds: List, + var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt index e2fafe9e3cc..6caa7e66bab 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt @@ -7,4 +7,5 @@ data class ConnectorRolloutActivityInputStart( var dockerImageTag: String, var actorDefinitionId: UUID, var rolloutId: UUID, + var updatedBy: UUID? 
= null, ) diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt index 71829ab1416..6c907eca037 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt @@ -37,6 +37,7 @@ class DoRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : Do input.rolloutId, input.actorIds, ConnectorRolloutStrategy.MANUAL, + input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt index ebf5798b749..973ebfc87b7 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt @@ -47,6 +47,7 @@ class FinalizeRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient ConnectorRolloutStrategy.MANUAL, errorMsg, failureReason, + input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt index 5088da35dcf..4297b37b261 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt +++ 
b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt @@ -38,6 +38,7 @@ class StartRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : input.rolloutId, workflowRunId, ConnectorRolloutStrategy.MANUAL, + input.updatedBy, ) return try { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java index 7dced66f8ad..e7b6c24e32f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java @@ -34,7 +34,6 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; -import jakarta.validation.Valid; import java.util.UUID; @Controller("/api/v1/connector_rollout") @@ -133,7 +132,7 @@ public ConnectorRolloutListResponse getConnectorRolloutsList(@Body final Connect @Secured({ADMIN}) @ExecuteOn(AirbyteTaskExecutors.IO) @Override - public ConnectorRolloutListResponse getConnectorRolloutsListAll(@Valid Object body) { + public ConnectorRolloutListResponse getConnectorRolloutsListAll() { return ApiHelper.execute(() -> { final var connectorRollouts = connectorRolloutHandler.listConnectorRollouts(); return new ConnectorRolloutListResponse().connectorRollouts(connectorRollouts); diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index ded8be3a392..d1fee022f5d 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -349,8 +349,8 @@ temporal: enabled: ${TEMPORAL_CLOUD_ENABLED:false} host: ${TEMPORAL_CLOUD_HOST:} namespace: ${TEMPORAL_CLOUD_NAMESPACE:} - connectorRollout: - namespace: 
${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:connector-rollout-stage.ebc2e} + connector-rollout: + namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:} host: ${TEMPORAL_HOST:`airbyte-temporal:7233`} retention: ${TEMPORAL_HISTORY_RETENTION_IN_DAYS:30} sdk: From 8cf3f66d361ac91e0bc85995df474c4658430797 Mon Sep 17 00:00:00 2001 From: Ella Rohm-Ensing Date: Fri, 27 Sep 2024 16:35:42 -0700 Subject: [PATCH 11/36] feat: add comments to track builder PRs (#14166) --- .../resources/contribution_templates/pull-request-edit.md.peb | 2 ++ .../contribution_templates/pull-request-new-connector.md.peb | 2 ++ 2 files changed, 4 insertions(+) diff --git a/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb b/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb index e3216ac6c19..ae69d7a8623 100644 --- a/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb +++ b/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-edit.md.peb @@ -12,3 +12,5 @@ The contributor provided the following description of the change: - [ ] Ensure connector docs are up to date with any changes - [ ] Run `/format-fix` to resolve any formatting errors - [ ] Click into the CI workflows that wait for a maintainer to run them, which should trigger CI runs + + diff --git a/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-new-connector.md.peb b/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-new-connector.md.peb index af598ef830f..ae0c0e8cca5 100644 --- a/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-new-connector.md.peb +++ b/airbyte-connector-builder-server/src/main/resources/contribution_templates/pull-request-new-connector.md.peb @@ -31,3 +31,5 @@ Contributor-provided connector description: - [ ] Run `/format-fix` 
to resolve any formatting errors - [ ] Help the contributor upload an icon that follows our icon design guidelines - [ ] Click into the CI workflows that wait for a maintainer to run them, which should trigger CI runs + + From 43133d8978ef7e65ab096aecb57234d108b402b3 Mon Sep 17 00:00:00 2001 From: Michael Siega <109092231+mfsiega-airbyte@users.noreply.github.com> Date: Mon, 30 Sep 2024 12:51:23 +0200 Subject: [PATCH 12/36] revert: "refactor: connector rollout api endpoint updates (#14145)" (#14173) --- .../server-api/src/main/openapi/config.yaml | 30 ++++++------------- .../handlers/ConnectorRolloutHandler.kt | 8 ----- .../handlers/ConnectorRolloutHandlerTest.kt | 15 +++++----- .../rollout/client/ConnectorRolloutCLI.kt | 6 ---- .../ConnectorRolloutActivityInputFinalize.kt | 1 - .../ConnectorRolloutActivityInputRollout.kt | 1 - .../ConnectorRolloutActivityInputStart.kt | 1 - .../activities/DoRolloutActivityImpl.kt | 1 - .../activities/FinalizeRolloutActivityImpl.kt | 1 - .../activities/StartRolloutActivityImpl.kt | 1 - .../apis/ConnectorRolloutApiController.java | 3 +- .../src/main/resources/application.yml | 4 +-- 12 files changed, 20 insertions(+), 52 deletions(-) diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml b/airbyte-api/server-api/src/main/openapi/config.yaml index e15410859c1..756e55ec0b2 100644 --- a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -5056,6 +5056,12 @@ paths: # Connector Rollouts /v1/connector_rollout/list_all: post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectorRolloutListAllRequestBody" + responses: "200": content: @@ -7362,6 +7368,9 @@ components: type: string format: uuid + ConnectorRolloutListAllRequestBody: + type: object + ConnectorRolloutListByActorDefinitionIdRequestBody: type: object required: @@ -7452,9 +7461,6 @@ components: type: string rollout_strategy: $ref: 
"#/components/schemas/ConnectorRolloutStrategy" - updated_by: - type: string - format: uuid ConnectorRolloutStartResponse: type: object @@ -7480,9 +7486,6 @@ components: $ref: "#/components/schemas/ActorId" rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" - updated_by: - type: string - format: uuid ConnectorRolloutResponse: type: object @@ -7510,9 +7513,6 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" - updated_by: - type: string - format: uuid ConnectorRolloutFinalizeResponse: type: object @@ -7528,7 +7528,6 @@ components: - docker_repository - docker_image_tag - actor_definition_id - - updated_by properties: docker_repository: type: string @@ -7537,9 +7536,6 @@ components: actor_definition_id: type: string format: uuid - updated_by: - type: string - format: uuid ConnectorRolloutManualRolloutRequestBody: type: object @@ -7548,7 +7544,6 @@ components: - docker_image_tag - actor_definition_id - id - - updated_by - actor_ids properties: docker_repository: @@ -7561,9 +7556,6 @@ components: id: type: string format: uuid - updated_by: - type: string - format: uuid actor_ids: type: array items: @@ -7576,7 +7568,6 @@ components: - docker_image_tag - actor_definition_id - id - - updated_by - state properties: docker_repository: @@ -7589,9 +7580,6 @@ components: id: type: string format: uuid - updated_by: - type: string - format: uuid state: $ref: "#/components/schemas/ConnectorRolloutStateTerminal" error_msg: diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt index 98a2d76c2b3..b2948843970 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt @@ -70,11 +70,6 @@ open class 
ConnectorRolloutHandler .hasBreakingChanges(connectorRollout.hasBreakingChanges) .rolloutStrategy(rolloutStrategy) .maxStepWaitTimeMins(connectorRollout.maxStepWaitTimeMins?.toInt()) - .updatedAt(connectorRollout.updatedAt?.let { unixTimestampToOffsetDateTime(it) }) - .createdAt(connectorRollout.createdAt?.let { unixTimestampToOffsetDateTime(it) }) - .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) - .errorMsg(connectorRollout.errorMsg) - .failedReason(connectorRollout.failedReason) .updatedBy( connectorRollout.rolloutStrategy?.let { strategy -> connectorRollout.updatedBy?.let { updatedBy -> @@ -155,7 +150,6 @@ open class ConnectorRolloutHandler dockerRepository: String, actorDefinitionId: UUID, dockerImageTag: String, - updatedBy: UUID, ): ConnectorRollout { val actorDefinitionVersion = actorDefinitionService.getActorDefinitionVersion( @@ -191,7 +185,6 @@ open class ConnectorRolloutHandler .withActorDefinitionId(actorDefinitionId) .withReleaseCandidateVersionId(actorDefinitionVersion.get().versionId) .withInitialVersionId(initialVersion.get().versionId) - .withUpdatedBy(updatedBy) .withState(ConnectorEnumRolloutState.INITIALIZED) .withHasBreakingChanges(false) connectorRolloutService.writeConnectorRollout(connectorRollout) @@ -365,7 +358,6 @@ open class ConnectorRolloutHandler connectorRolloutWorkflowStart.dockerRepository, connectorRolloutWorkflowStart.actorDefinitionId, connectorRolloutWorkflowStart.dockerImageTag, - connectorRolloutWorkflowStart.updatedBy, ) connectorRolloutClient.startWorkflow( ConnectorRolloutActivityInputStart( diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt index 338e62ce606..b2598992674 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt +++ 
b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt @@ -85,7 +85,6 @@ internal class ConnectorRolloutHandlerTest { val DOCKER_IMAGE_TAG = "0.1" val ACTOR_DEFINITION_ID = UUID.randomUUID() val RELEASE_CANDIDATE_VERSION_ID = UUID.randomUUID() - val UPDATED_BY = UUID.randomUUID() @JvmStatic fun validInsertStates() = listOf(ConnectorEnumRolloutState.CANCELED_ROLLED_BACK) @@ -608,7 +607,6 @@ internal class ConnectorRolloutHandlerTest { dockerRepository = DOCKER_REPOSITORY dockerImageTag = DOCKER_IMAGE_TAG actorDefinitionId = ACTOR_DEFINITION_ID - updatedBy = UPDATED_BY } val connectorRollout = createMockConnectorRollout(rolloutId) @@ -695,6 +693,7 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput updates rollout when already exists in INITIALIZED state`() { val rolloutId = UUID.randomUUID() + val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() @@ -710,7 +709,7 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(any()) } returns Optional.of(createMockActorDefinitionVersion()) - val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, actorDefinitionId, dockerImageTag, UPDATED_BY) + val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, actorDefinitionId, dockerImageTag) assertEquals(connectorRollout.id, result.id) verifyAll { @@ -723,6 +722,7 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput throws when initial version is not found`() { val rolloutId = UUID.randomUUID() + val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() 
@@ -740,10 +740,9 @@ internal class ConnectorRolloutHandlerTest { assertThrows { connectorRolloutHandler.getOrCreateAndValidateManualStartInput( - DOCKER_REPOSITORY, + dockerRepository, actorDefinitionId, dockerImageTag, - UPDATED_BY, ) } } @@ -763,7 +762,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(ACTOR_DEFINITION_ID) } returns Optional.of(actorDefinitionVersion) every { connectorRolloutService.writeConnectorRollout(any()) } returns connectorRollout - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) verifyAll { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) @@ -778,7 +777,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.empty() assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } verify { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } @@ -798,7 +797,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.of(actorDefinitionVersion) assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } verifyAll { diff --git 
a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt index 8806170e8f2..7ab12f65342 100644 --- a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt +++ b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt @@ -111,7 +111,6 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, - UUID(0, 0), ) startWorkflow(rolloutClient, startInput) } @@ -133,7 +132,6 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, - UUID(0, 0), rolloutId!!, actorIds!!, ) @@ -146,7 +144,6 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, - UUID(0, 0), ConnectorRolloutStateTerminal.valueOf(ConnectorRolloutFinalState.SUCCEEDED.toString()), null, null, @@ -160,7 +157,6 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, - UUID(0, 0), ConnectorRolloutStateTerminal.FAILED_ROLLED_BACK, null, null, @@ -174,7 +170,6 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, - UUID(0, 0), ConnectorRolloutStateTerminal.CANCELED_ROLLED_BACK, null, null, @@ -192,7 +187,6 @@ class ConnectorRolloutCLI : Runnable { client: ConnectorRolloutApi, input: ConnectorRolloutManualStartRequestBody, ) { - logFormatted("CLI.startWorkflow using client", client) logFormatted("CLI.startWorkflow with input", input) logFormatted("CLI Rollout workflows status", client.manualStartConnectorRollout(input)) } diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt 
b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt index 2e59ea2c3b7..9cea8ec7dcf 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt @@ -11,5 +11,4 @@ data class ConnectorRolloutActivityInputFinalize( var result: ConnectorRolloutFinalState, var errorMsg: String? = null, var failedReason: String? = null, - var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt index 2b928988d05..4d18042bb85 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt @@ -8,5 +8,4 @@ data class ConnectorRolloutActivityInputRollout( var actorDefinitionId: UUID, var rolloutId: UUID, var actorIds: List, - var updatedBy: UUID? 
= null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt index 6caa7e66bab..e2fafe9e3cc 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt @@ -7,5 +7,4 @@ data class ConnectorRolloutActivityInputStart( var dockerImageTag: String, var actorDefinitionId: UUID, var rolloutId: UUID, - var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt index 6c907eca037..71829ab1416 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt @@ -37,7 +37,6 @@ class DoRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : Do input.rolloutId, input.actorIds, ConnectorRolloutStrategy.MANUAL, - input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt index 973ebfc87b7..ebf5798b749 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt +++ 
b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt @@ -47,7 +47,6 @@ class FinalizeRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient ConnectorRolloutStrategy.MANUAL, errorMsg, failureReason, - input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt index 4297b37b261..5088da35dcf 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt @@ -38,7 +38,6 @@ class StartRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : input.rolloutId, workflowRunId, ConnectorRolloutStrategy.MANUAL, - input.updatedBy, ) return try { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java index e7b6c24e32f..7dced66f8ad 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java @@ -34,6 +34,7 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; +import jakarta.validation.Valid; import java.util.UUID; @Controller("/api/v1/connector_rollout") @@ -132,7 +133,7 @@ public ConnectorRolloutListResponse getConnectorRolloutsList(@Body final Connect @Secured({ADMIN}) @ExecuteOn(AirbyteTaskExecutors.IO) @Override - public ConnectorRolloutListResponse getConnectorRolloutsListAll() { + 
public ConnectorRolloutListResponse getConnectorRolloutsListAll(@Valid Object body) { return ApiHelper.execute(() -> { final var connectorRollouts = connectorRolloutHandler.listConnectorRollouts(); return new ConnectorRolloutListResponse().connectorRollouts(connectorRollouts); diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index d1fee022f5d..ded8be3a392 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -349,8 +349,8 @@ temporal: enabled: ${TEMPORAL_CLOUD_ENABLED:false} host: ${TEMPORAL_CLOUD_HOST:} namespace: ${TEMPORAL_CLOUD_NAMESPACE:} - connector-rollout: - namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:} + connectorRollout: + namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:connector-rollout-stage.ebc2e} host: ${TEMPORAL_HOST:`airbyte-temporal:7233`} retention: ${TEMPORAL_HISTORY_RETENTION_IN_DAYS:30} sdk: From 1f5b6ff63ca389bb2e54555be47df913eabcd419 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Mon, 30 Sep 2024 10:58:36 -0400 Subject: [PATCH 13/36] chore: helm values for the connector rollout worker (#14076) --- charts/airbyte-connector-rollout-worker/values.yaml | 2 +- charts/airbyte/values.yaml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/charts/airbyte-connector-rollout-worker/values.yaml b/charts/airbyte-connector-rollout-worker/values.yaml index 94c4bdf9c8e..72c294f8268 100644 --- a/charts/airbyte-connector-rollout-worker/values.yaml +++ b/charts/airbyte-connector-rollout-worker/values.yaml @@ -82,7 +82,7 @@ global: ## jobs.kube.images.curl [string] curl image used by the job pod curl: "" -enabled: false +enabled: true ## connector-rollout-worker.replicaCount Number of connector-rollout-worker replicas replicaCount: 1 diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 6a1dccdd8c8..2fd5ca5ccea 100644 --- a/charts/airbyte/values.yaml +++ 
b/charts/airbyte/values.yaml @@ -1007,7 +1007,7 @@ workload-launcher: ## @section Rollout Worker Parameters connector-rollout-worker: - enabled: false + enabled: true # -- Number of connector rollout worker replicas replicaCount: 1 From 67bebbec81d289e10f2d8dd2d9d0131be07c7ad1 Mon Sep 17 00:00:00 2001 From: Chandler Prall Date: Mon, 30 Sep 2024 11:21:32 -0400 Subject: [PATCH 14/36] fix: show the field hashing header (#14174) --- .../SyncCatalogTable/SyncCatalogTable.tsx | 21 ++++++++++--------- 1 file changed, 11 insertions(+), 10 deletions(-) diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx index d5b276f632c..2e3867af55e 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/SyncCatalogTable.tsx @@ -214,17 +214,18 @@ export const SyncCatalogTable: FC = () => { }), columnHelper.display({ id: "hashing", + header: () => ( + + + + + + + + + ), cell: ({ row }) => - isNamespaceRow(row) ? ( - - - - - - - - - ) : isStreamRow(row) ? null : ( + isNamespaceRow(row) || isStreamRow(row) ? 
null : ( ), meta: { From 97033c1a6f12920d26d6551655d9bc357b069969 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Mon, 30 Sep 2024 12:08:36 -0400 Subject: [PATCH 15/36] fix: disable connector rollout worker on oss (#14178) --- charts/airbyte/values.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/charts/airbyte/values.yaml b/charts/airbyte/values.yaml index 2fd5ca5ccea..6a1dccdd8c8 100644 --- a/charts/airbyte/values.yaml +++ b/charts/airbyte/values.yaml @@ -1007,7 +1007,7 @@ workload-launcher: ## @section Rollout Worker Parameters connector-rollout-worker: - enabled: true + enabled: false # -- Number of connector rollout worker replicas replicaCount: 1 From 5eebaa8afb4e653e155c1854627f18a7078d7b1b Mon Sep 17 00:00:00 2001 From: Lake Mossman Date: Mon, 30 Sep 2024 09:31:23 -0700 Subject: [PATCH 16/36] design: vertically center InfoTooltip icon (#14165) --- .../src/components/LabeledControl/ControlLabels.module.scss | 4 ---- .../src/components/LabeledControl/ControlLabels.tsx | 4 ++-- .../src/components/ui/Tooltip/InfoTooltip.module.scss | 2 +- 3 files changed, 3 insertions(+), 7 deletions(-) diff --git a/airbyte-webapp/src/components/LabeledControl/ControlLabels.module.scss b/airbyte-webapp/src/components/LabeledControl/ControlLabels.module.scss index 2c0bdbd7231..e431612fffe 100644 --- a/airbyte-webapp/src/components/LabeledControl/ControlLabels.module.scss +++ b/airbyte-webapp/src/components/LabeledControl/ControlLabels.module.scss @@ -19,7 +19,3 @@ .tooltip { word-wrap: break-word; } - -.tooltipContainer { - align-self: flex-start; -} diff --git a/airbyte-webapp/src/components/LabeledControl/ControlLabels.tsx b/airbyte-webapp/src/components/LabeledControl/ControlLabels.tsx index 16399e602e1..24b356a8fe3 100644 --- a/airbyte-webapp/src/components/LabeledControl/ControlLabels.tsx +++ b/airbyte-webapp/src/components/LabeledControl/ControlLabels.tsx @@ -38,7 +38,7 @@ const ControlLabels = React.forwardRef {props.label} 
{props.infoTooltipContent && ( - + {props.infoTooltipContent} )} @@ -49,7 +49,7 @@ const ControlLabels = React.forwardRef - {props.labelAction &&
{props.labelAction}
} + {props.labelAction &&
{props.labelAction}
} {props.children} diff --git a/airbyte-webapp/src/components/ui/Tooltip/InfoTooltip.module.scss b/airbyte-webapp/src/components/ui/Tooltip/InfoTooltip.module.scss index 5c96d1ea16b..93f01728a5f 100644 --- a/airbyte-webapp/src/components/ui/Tooltip/InfoTooltip.module.scss +++ b/airbyte-webapp/src/components/ui/Tooltip/InfoTooltip.module.scss @@ -7,7 +7,7 @@ $icon-size: 14px; $icon-padding: 5px; .container { - display: inline-block; + display: flex; color: colors.$grey; padding-left: $icon-padding; } From 5bc5182164c1c0c4253465086a0c97750a40af16 Mon Sep 17 00:00:00 2001 From: Chandler Prall Date: Mon, 30 Sep 2024 12:53:51 -0400 Subject: [PATCH 17/36] chore: tighten up copy around marketplace connectors when creating a connection (#14177) --- airbyte-webapp/src/locales/en.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index c398d251af8..deba17513c5 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -1262,7 +1262,7 @@ "connector.supportLevel.custom": "Custom", "connector.connectorNameAndVersion": "{connectorName} v{version}", "connector.supportLevel.certified.description": "Airbyte Connectors are actively maintained and supported by the Airbyte team and maintain a high quality bar. They are production ready.", - "connector.supportLevel.community.description": "Marketplace connectors are maintained by the Airbyte community until they become Airbyte Connectors. Airbyte does not offer support SLAs around them, and encourages caution when using them in production.", + "connector.supportLevel.community.description": "Marketplace connectors are maintained by the Airbyte community. 
Airbyte does not offer support SLAs around them, and encourages caution when using them in production.", "connector.supportLevel.archived.description": "Archived connectors have been removed from the Airbyte Registry due to low quality or low usage.", "connector.supportLevel.custom.description": "Custom connectors are added to the workspace manually by the user.", "connector.connectorsInDevelopment.docLink": "See our documentation for more details.", From 223db00cc0908f253a05dedb19dca15d8b9623f0 Mon Sep 17 00:00:00 2001 From: Bryce Groff Date: Mon, 30 Sep 2024 10:51:38 -0700 Subject: [PATCH 18/36] refactor: remove references of the OAuthService from the ConfigRespository (#14167) --- .../config/DatabaseBeanFactory.java | 3 - .../io/airbyte/bootloader/BootloaderTest.java | 13 ---- .../commons/server/handlers/OAuthHandler.java | 20 +++--- .../server/handlers/OAuthHandlerTest.java | 12 ++-- .../config/persistence/ConfigRepository.java | 69 ------------------- ...finitionBreakingChangePersistenceTest.java | 5 -- .../ActorDefinitionPersistenceTest.java | 5 -- ...ActorDefinitionVersionPersistenceTest.java | 5 -- .../persistence/ConfigInjectionTest.java | 5 -- .../ConfigRepositoryE2EReadWriteTest.java | 26 +++---- .../ConnectorMetadataPersistenceTest.java | 5 -- .../PermissionPersistenceTest.java | 5 -- .../StandardSyncPersistenceTest.java | 5 -- .../persistence/StatePersistenceTest.java | 9 --- .../SyncOperationPersistenceTest.java | 5 -- .../persistence/UserPersistenceTest.java | 5 -- .../persistence/WorkspaceFilterTest.java | 5 -- .../persistence/WorkspacePersistenceTest.java | 5 -- .../cron/config/DatabaseBeanFactory.java | 3 - airbyte-oauth/build.gradle.kts | 1 + .../FacebookOAuthFlowIntegrationTest.java | 6 +- .../GithubOAuthFlowIntegrationTest.java | 6 +- .../GitlabOAuthFlowIntegrationTest.java | 9 ++- .../IntercomOAuthFlowIntegrationTest.java | 9 ++- .../LinkedinAdsOAuthFlowIntegrationTest.java | 8 +-- .../PipeDriveOAuthFlowIntegrationTest.java | 8 +-- 
.../QuickbooksOAuthFlowIntegrationTest.java | 8 +-- .../SalesforceOAuthFlowIntegrationTest.java | 8 +-- .../SlackOAuthFlowIntegrationTest.java | 8 +-- ...chatMarketingOAuthFlowIntegrationTest.java | 8 +-- .../SquareOAuthFlowIntegrationTest.java | 6 +- .../SurveymonkeyOAuthFlowIntegrationTest.java | 6 +- .../TrelloOAuthFlowIntegrationTest.java | 8 +-- .../TypeformOAuthFlowIntegrationTest.java | 9 ++- .../HubspotOAuthFlowIntegrationTest.java | 8 +-- .../oauth/flows/OAuthFlowIntegrationTest.java | 11 ++- .../GoogleAdsOAuthFlowIntegrationTest.java | 11 ++- ...ogleAnalyticsOAuthFlowIntegrationTest.java | 8 +-- ...SearchConsoleOAuthFlowIntegrationTest.java | 11 ++- .../GoogleSheetsOAuthFlowIntegrationTest.java | 8 +-- .../oauth/flows/BaseOAuthFlowTest.java | 15 ++-- .../oauth/flows/TrelloOAuthFlowTest.java | 8 +-- .../job-persistence/build.gradle.kts | 1 + .../job/factory/OAuthConfigSupplier.java | 14 ++-- .../job/factory/OAuthConfigSupplierTest.java | 9 ++- .../server/config/DatabaseBeanFactory.java | 3 - .../server/config/TemporalBeanFactory.java | 6 +- 47 files changed, 142 insertions(+), 289 deletions(-) diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java index b1ddc19aa4d..1fe8d79b83e 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java @@ -17,7 +17,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.ConnectorBuilderService; import io.airbyte.data.services.DestinationService; -import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.OperationService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -128,7 +127,6 @@ public ConfigRepository configRepository(final ActorDefinitionService 
actorDefin final ConnectionService connectionService, final ConnectorBuilderService connectorBuilderService, final DestinationService destinationService, - final OAuthService oauthService, final OperationService operationService, final SourceService sourceService, final WorkspaceService workspaceService) { @@ -138,7 +136,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin connectionService, connectorBuilderService, destinationService, - oauthService, operationService, sourceService, workspaceService); diff --git a/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java index d331211969e..35be98efe01 100644 --- a/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java +++ b/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java @@ -42,7 +42,6 @@ import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl; @@ -183,10 +182,6 @@ void testBootloaderAppBlankDb() throws Exception { connectionService, connectorBuilderService, destinationService, - new OAuthServiceJooqImpl(configDatabase, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(configDatabase), sourceService, workspaceService); @@ -296,10 +291,6 @@ void testRequiredVersionUpgradePredicate() throws Exception { connectionService, connectorBuilderService, destinationService, - new OAuthServiceJooqImpl(configDatabase, - 
featureFlagClient, - mock(SecretsRepositoryReader.class), - mock(SecretPersistenceConfigService.class)), new OperationServiceJooqImpl(configDatabase), sourceService, workspaceService); @@ -421,10 +412,6 @@ void testPostLoadExecutionExecutes() throws Exception { mock(SecretPersistenceConfigService.class), connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(configDatabase, - featureFlagClient, - mock(SecretsRepositoryReader.class), - mock(SecretPersistenceConfigService.class)), new OperationServiceJooqImpl(configDatabase), new SourceServiceJooqImpl(configDatabase, featureFlagClient, diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java index a0b54ef8c63..f8c5bddb8e7 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/OAuthHandler.java @@ -419,27 +419,27 @@ public void revokeSourceOauthTokens(final RevokeSourceOauthTokensRequest revokeS } public void setSourceInstancewideOauthParams(final SetInstancewideSourceOauthParamsRequestBody requestBody) - throws JsonValidationException, IOException { - final SourceOAuthParameter param = configRepository + throws IOException { + final SourceOAuthParameter param = oAuthService .getSourceOAuthParamByDefinitionIdOptional(null, requestBody.getSourceDefinitionId()) .orElseGet(() -> new SourceOAuthParameter().withOauthParameterId(UUID.randomUUID())) .withConfiguration(Jsons.jsonNode(requestBody.getParams())) .withSourceDefinitionId(requestBody.getSourceDefinitionId()); // TODO validate requestBody.getParams() against // spec.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerInputSpecification() - configRepository.writeSourceOAuthParam(param); + oAuthService.writeSourceOAuthParam(param); } public void 
setDestinationInstancewideOauthParams(final SetInstancewideDestinationOauthParamsRequestBody requestBody) - throws JsonValidationException, IOException { - final DestinationOAuthParameter param = configRepository + throws IOException { + final DestinationOAuthParameter param = oAuthService .getDestinationOAuthParamByDefinitionIdOptional(null, requestBody.getDestinationDefinitionId()) .orElseGet(() -> new DestinationOAuthParameter().withOauthParameterId(UUID.randomUUID())) .withConfiguration(Jsons.jsonNode(requestBody.getParams())) .withDestinationDefinitionId(requestBody.getDestinationDefinitionId()); // TODO validate requestBody.getParams() against // spec.getAdvancedAuth().getOauthConfigSpecification().getCompleteOauthServerInputSpecification() - configRepository.writeDestinationOAuthParam(param); + oAuthService.writeDestinationOAuthParam(param); } private JsonNode getOAuthInputConfigurationForConsent(final ConnectorSpecification spec, @@ -606,14 +606,14 @@ public void setSourceWorkspaceOverrideOauthParams(final WorkspaceOverrideOauthPa final JsonNode sanitizedOauthConfiguration = sanitizeOauthConfiguration(workspaceId, connectorSpecification, oauthParamConfiguration); - final SourceOAuthParameter param = configRepository + final SourceOAuthParameter param = oAuthService .getSourceOAuthParamByDefinitionIdOptional(workspaceId, definitionId) .orElseGet(() -> new SourceOAuthParameter().withOauthParameterId(UUID.randomUUID())) .withConfiguration(sanitizedOauthConfiguration) .withSourceDefinitionId(definitionId) .withWorkspaceId(workspaceId); - configRepository.writeSourceOAuthParam(param); + oAuthService.writeSourceOAuthParam(param); } public void setDestinationWorkspaceOverrideOauthParams(final WorkspaceOverrideOauthParamsRequestBody requestBody) @@ -631,14 +631,14 @@ public void setDestinationWorkspaceOverrideOauthParams(final WorkspaceOverrideOa final JsonNode sanitizedOauthConfiguration = sanitizeOauthConfiguration(workspaceId, connectorSpecification, 
oauthParamConfiguration); - final DestinationOAuthParameter param = configRepository + final DestinationOAuthParameter param = oAuthService .getDestinationOAuthParamByDefinitionIdOptional(workspaceId, definitionId) .orElseGet(() -> new DestinationOAuthParameter().withOauthParameterId(UUID.randomUUID())) .withConfiguration(sanitizedOauthConfiguration) .withDestinationDefinitionId(definitionId) .withWorkspaceId(workspaceId); - configRepository.writeDestinationOAuthParam(param); + oAuthService.writeDestinationOAuthParam(param); } /** diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java index 9c72478644d..d29b024ea04 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/OAuthHandlerTest.java @@ -98,7 +98,7 @@ void setSourceInstancewideOauthParams() throws JsonValidationException, IOExcept handler.setSourceInstancewideOauthParams(actualRequest); final ArgumentCaptor argument = ArgumentCaptor.forClass(SourceOAuthParameter.class); - Mockito.verify(configRepository).writeSourceOAuthParam(argument.capture()); + Mockito.verify(oauthService).writeSourceOAuthParam(argument.capture()); assertEquals(Jsons.jsonNode(params), argument.getValue().getConfiguration()); assertEquals(sourceDefId, argument.getValue().getSourceDefinitionId()); } @@ -115,7 +115,7 @@ void resetSourceInstancewideOauthParams() throws JsonValidationException, IOExce handler.setSourceInstancewideOauthParams(firstRequest); final UUID oauthParameterId = UUID.randomUUID(); - when(configRepository.getSourceOAuthParamByDefinitionIdOptional(null, sourceDefId)) + when(oauthService.getSourceOAuthParamByDefinitionIdOptional(null, sourceDefId)) .thenReturn(Optional.of(new SourceOAuthParameter().withOauthParameterId(oauthParameterId))); final Map 
secondParams = new HashMap<>(); @@ -127,7 +127,7 @@ void resetSourceInstancewideOauthParams() throws JsonValidationException, IOExce handler.setSourceInstancewideOauthParams(secondRequest); final ArgumentCaptor argument = ArgumentCaptor.forClass(SourceOAuthParameter.class); - Mockito.verify(configRepository, Mockito.times(2)).writeSourceOAuthParam(argument.capture()); + Mockito.verify(oauthService, Mockito.times(2)).writeSourceOAuthParam(argument.capture()); final List capturedValues = argument.getAllValues(); assertEquals(Jsons.jsonNode(firstParams), capturedValues.get(0).getConfiguration()); assertEquals(Jsons.jsonNode(secondParams), capturedValues.get(1).getConfiguration()); @@ -150,7 +150,7 @@ void setDestinationInstancewideOauthParams() throws JsonValidationException, IOE handler.setDestinationInstancewideOauthParams(actualRequest); final ArgumentCaptor argument = ArgumentCaptor.forClass(DestinationOAuthParameter.class); - Mockito.verify(configRepository).writeDestinationOAuthParam(argument.capture()); + Mockito.verify(oauthService).writeDestinationOAuthParam(argument.capture()); assertEquals(Jsons.jsonNode(params), argument.getValue().getConfiguration()); assertEquals(destinationDefId, argument.getValue().getDestinationDefinitionId()); } @@ -167,7 +167,7 @@ void resetDestinationInstancewideOauthParams() throws JsonValidationException, I handler.setDestinationInstancewideOauthParams(firstRequest); final UUID oauthParameterId = UUID.randomUUID(); - when(configRepository.getDestinationOAuthParamByDefinitionIdOptional(null, destinationDefId)) + when(oauthService.getDestinationOAuthParamByDefinitionIdOptional(null, destinationDefId)) .thenReturn(Optional.of(new DestinationOAuthParameter().withOauthParameterId(oauthParameterId))); final Map secondParams = new HashMap<>(); @@ -179,7 +179,7 @@ void resetDestinationInstancewideOauthParams() throws JsonValidationException, I handler.setDestinationInstancewideOauthParams(secondRequest); final ArgumentCaptor argument = 
ArgumentCaptor.forClass(DestinationOAuthParameter.class); - Mockito.verify(configRepository, Mockito.times(2)).writeDestinationOAuthParam(argument.capture()); + Mockito.verify(oauthService, Mockito.times(2)).writeDestinationOAuthParam(argument.capture()); final List capturedValues = argument.getAllValues(); assertEquals(Jsons.jsonNode(firstParams), capturedValues.get(0).getConfiguration()); assertEquals(Jsons.jsonNode(secondParams), capturedValues.get(1).getConfiguration()); diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index 614373794db..c22417547ed 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -15,10 +15,8 @@ import io.airbyte.config.ActorDefinitionVersion; import io.airbyte.config.ConfigSchema; import io.airbyte.config.DestinationConnection; -import io.airbyte.config.DestinationOAuthParameter; import io.airbyte.config.Geography; import io.airbyte.config.SourceConnection; -import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.StandardDestinationDefinition; import io.airbyte.config.StandardSourceDefinition; import io.airbyte.config.StandardSync; @@ -31,7 +29,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.ConnectorBuilderService; import io.airbyte.data.services.DestinationService; -import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.OperationService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -140,7 +137,6 @@ public record ResourcesByUserQueryPaginated( private final ConnectionService connectionService; private final ConnectorBuilderService connectorBuilderService; private final 
DestinationService destinationService; - private final OAuthService oAuthService; private final OperationService operationService; private final SourceService sourceService; private final WorkspaceService workspaceService; @@ -152,7 +148,6 @@ public ConfigRepository(final ActorDefinitionService actorDefinitionService, final ConnectionService connectionService, final ConnectorBuilderService connectorBuilderService, final DestinationService destinationService, - final OAuthService oAuthService, final OperationService operationService, final SourceService sourceService, final WorkspaceService workspaceService) { @@ -161,7 +156,6 @@ public ConfigRepository(final ActorDefinitionService actorDefinitionService, this.connectionService = connectionService; this.connectorBuilderService = connectorBuilderService; this.destinationService = destinationService; - this.oAuthService = oAuthService; this.operationService = operationService; this.sourceService = sourceService; this.workspaceService = workspaceService; @@ -1210,57 +1204,6 @@ public void deleteStandardSyncOperation(final UUID standardSyncOperationId) thro operationService.deleteStandardSyncOperation(standardSyncOperationId); } - /** - * Get source oauth parameter. - * - * @param workspaceId workspace id - * @param sourceDefinitionId source definition id - * @return source oauth parameter - * @throws IOException if there is an issue while interacting with db. - */ - @Deprecated - public Optional getSourceOAuthParamByDefinitionIdOptional(final UUID workspaceId, final UUID sourceDefinitionId) - throws IOException { - return oAuthService.getSourceOAuthParamByDefinitionIdOptional(workspaceId, sourceDefinitionId); - } - - /** - * Write source oauth param. - * - * @param sourceOAuthParameter source oauth param - * @throws IOException if there is an issue while interacting with db. 
- */ - @Deprecated - public void writeSourceOAuthParam(final SourceOAuthParameter sourceOAuthParameter) throws IOException { - oAuthService.writeSourceOAuthParam(sourceOAuthParameter); - } - - /** - * Get destination oauth parameter. - * - * @param workspaceId workspace id - * @param destinationDefinitionId destination definition id - * @return oauth parameters if present - * @throws IOException if there is an issue while interacting with db. - */ - @Deprecated - public Optional getDestinationOAuthParamByDefinitionIdOptional(final UUID workspaceId, - final UUID destinationDefinitionId) - throws IOException { - return oAuthService.getDestinationOAuthParamByDefinitionIdOptional(workspaceId, destinationDefinitionId); - } - - /** - * Write destination oauth param. - * - * @param destinationOAuthParameter destination oauth parameter - * @throws IOException if there is an issue while interacting with db. - */ - @Deprecated - public void writeDestinationOAuthParam(final DestinationOAuthParameter destinationOAuthParameter) throws IOException { - oAuthService.writeDestinationOAuthParam(destinationOAuthParameter); - } - /** * Pair of source and its associated definition. *

@@ -1731,16 +1674,4 @@ public Set listEarlySyncJobs(final int freeUsageInterval, final int jobsFe return connectionService.listEarlySyncJobs(freeUsageInterval, jobsFetchRange); } - @Deprecated - public Optional getSourceOAuthParameterOptional(final UUID workspaceId, final UUID sourceDefinitionId) - throws IOException { - return oAuthService.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId); - } - - @Deprecated - public Optional getDestinationOAuthParameterOptional(final UUID workspaceId, final UUID sourceDefinitionId) - throws IOException { - return oAuthService.getDestinationOAuthParameterOptional(workspaceId, sourceDefinitionId); - } - } diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java index 1b59e424a90..b1151efed2c 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java @@ -28,7 +28,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl; @@ -142,10 +141,6 @@ void setup() throws SQLException, JsonValidationException, IOException { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - 
secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java index 46c9fb310b2..73ae6900ba4 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java @@ -37,7 +37,6 @@ import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -99,10 +98,6 @@ void setup() throws SQLException, IOException { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java index 7f86e77831b..c7ac4a5361c 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java +++ 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java @@ -36,7 +36,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl; @@ -139,10 +138,6 @@ void beforeEach() throws Exception { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java index afa0cafdcac..5087f34de79 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java @@ -28,7 +28,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl; @@ -81,10 +80,6 @@ void beforeEach() 
throws Exception { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java index d16a72e1159..33c98945889 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java @@ -44,6 +44,7 @@ import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ActorDefinitionService; import io.airbyte.data.services.ConnectionService; +import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.OrganizationService; import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; @@ -99,6 +100,7 @@ class ConfigRepositoryE2EReadWriteTest extends BaseConfigDatabaseTest { private static final String CONFIG_HASH = "ConfigHash"; private ConfigRepository configRepository; + private OAuthService oauthService; @BeforeEach void setup() throws IOException, JsonValidationException, SQLException { @@ -129,10 +131,6 @@ void setup() throws IOException, JsonValidationException, SQLException { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, @@ -148,6 +146,10 @@ void setup() 
throws IOException, JsonValidationException, SQLException { secretPersistenceConfigService))); OrganizationService organizationService = new OrganizationServiceJooqImpl(database); organizationService.writeOrganization(MockData.defaultOrganization()); + oauthService = spy(new OAuthServiceJooqImpl(database, + featureFlagClient, + secretsRepositoryReader, + secretPersistenceConfigService)); for (final StandardWorkspace workspace : MockData.standardWorkspaces()) { configRepository.writeStandardWorkspaceNoSecrets(workspace); } @@ -177,10 +179,10 @@ void setup() throws IOException, JsonValidationException, SQLException { } for (final SourceOAuthParameter oAuthParameter : MockData.sourceOauthParameters()) { - configRepository.writeSourceOAuthParam(oAuthParameter); + oauthService.writeSourceOAuthParam(oAuthParameter); } for (final DestinationOAuthParameter oAuthParameter : MockData.destinationOauthParameters()) { - configRepository.writeDestinationOAuthParam(oAuthParameter); + oauthService.writeDestinationOAuthParam(oAuthParameter); } database.transaction(ctx -> ctx.truncate(ACTOR_DEFINITION_WORKSPACE_GRANT).execute()); @@ -640,7 +642,7 @@ void testWorkspaceCanUseDefinition() throws IOException { void testGetDestinationOAuthByDefinitionId() throws IOException { final DestinationOAuthParameter destinationOAuthParameter = MockData.destinationOauthParameters().get(0); - final Optional result = configRepository.getDestinationOAuthParamByDefinitionIdOptional( + final Optional result = oauthService.getDestinationOAuthParamByDefinitionIdOptional( destinationOAuthParameter.getWorkspaceId(), destinationOAuthParameter.getDestinationDefinitionId()); assertTrue(result.isPresent()); assertEquals(destinationOAuthParameter, result.get()); @@ -651,17 +653,17 @@ void testMissingDestinationOAuthByDefinitionId() throws IOException { final UUID missingId = UUID.fromString("fc59cfa0-06de-4c8b-850b-46d4cfb65629"); final DestinationOAuthParameter destinationOAuthParameter = 
MockData.destinationOauthParameters().get(0); Optional result = - configRepository.getDestinationOAuthParamByDefinitionIdOptional(destinationOAuthParameter.getWorkspaceId(), missingId); + oauthService.getDestinationOAuthParamByDefinitionIdOptional(destinationOAuthParameter.getWorkspaceId(), missingId); assertFalse(result.isPresent()); - result = configRepository.getDestinationOAuthParamByDefinitionIdOptional(missingId, destinationOAuthParameter.getDestinationDefinitionId()); + result = oauthService.getDestinationOAuthParamByDefinitionIdOptional(missingId, destinationOAuthParameter.getDestinationDefinitionId()); assertFalse(result.isPresent()); } @Test void testGetSourceOAuthByDefinitionId() throws IOException { final SourceOAuthParameter sourceOAuthParameter = MockData.sourceOauthParameters().get(0); - final Optional result = configRepository.getSourceOAuthParamByDefinitionIdOptional(sourceOAuthParameter.getWorkspaceId(), + final Optional result = oauthService.getSourceOAuthParamByDefinitionIdOptional(sourceOAuthParameter.getWorkspaceId(), sourceOAuthParameter.getSourceDefinitionId()); assertTrue(result.isPresent()); assertEquals(sourceOAuthParameter, result.get()); @@ -672,10 +674,10 @@ void testMissingSourceOAuthByDefinitionId() throws IOException { final UUID missingId = UUID.fromString("fc59cfa0-06de-4c8b-850b-46d4cfb65629"); final SourceOAuthParameter sourceOAuthParameter = MockData.sourceOauthParameters().get(0); Optional result = - configRepository.getSourceOAuthParamByDefinitionIdOptional(sourceOAuthParameter.getWorkspaceId(), missingId); + oauthService.getSourceOAuthParamByDefinitionIdOptional(sourceOAuthParameter.getWorkspaceId(), missingId); assertFalse(result.isPresent()); - result = configRepository.getSourceOAuthParamByDefinitionIdOptional(missingId, sourceOAuthParameter.getSourceDefinitionId()); + result = oauthService.getSourceOAuthParamByDefinitionIdOptional(missingId, sourceOAuthParameter.getSourceDefinitionId()); assertFalse(result.isPresent()); 
} diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java index a275992b1df..203a5c759eb 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java @@ -43,7 +43,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -112,10 +111,6 @@ void setup() throws SQLException, JsonValidationException, IOException { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java index aeb9dac0742..ef5391ba428 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java @@ -22,7 +22,6 @@ import 
io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl; @@ -70,10 +69,6 @@ private void setupTestData() throws Exception { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java index 06c64ab7701..4a41eac7398 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java @@ -55,7 +55,6 @@ import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -125,10 +124,6 @@ void beforeEach() throws Exception { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), 
- new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java index 9660e1ea154..7adbf293d46 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java @@ -33,7 +33,6 @@ import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -105,10 +104,6 @@ private UUID setupTestData() throws JsonValidationException, IOException { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, @@ -937,10 +932,6 @@ private UUID setupSecondConnection() throws JsonValidationException, IOException secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, 
diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java index 37700f7f630..c3afd6f8c58 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java @@ -24,7 +24,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -84,10 +83,6 @@ void beforeEach() throws Exception { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java index 1a83d9341be..8eb353b8302 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java @@ -26,7 +26,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import 
io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -76,10 +75,6 @@ void setup() { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java index 4bf275c328b..e93b37e89c6 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java @@ -23,7 +23,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OrganizationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; @@ -153,10 +152,6 @@ void beforeEach() { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new 
OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java index 50b082ac02c..e21b6cba216 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java @@ -42,7 +42,6 @@ import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.OAuthServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.SourceServiceJooqImpl; import io.airbyte.data.services.impls.jooq.WorkspaceServiceJooqImpl; @@ -105,10 +104,6 @@ void setup() throws Exception { secretPersistenceConfigService, connectionService, actorDefinitionVersionUpdater), - new OAuthServiceJooqImpl(database, - featureFlagClient, - secretsRepositoryReader, - secretPersistenceConfigService), new OperationServiceJooqImpl(database), new SourceServiceJooqImpl(database, featureFlagClient, diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java b/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java index 635849ba70f..639b7ab8f63 100644 --- a/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java +++ b/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java @@ -12,7 +12,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.ConnectorBuilderService; import io.airbyte.data.services.DestinationService; -import 
io.airbyte.data.services.OAuthService; import io.airbyte.data.services.OperationService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -92,7 +91,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin final ConnectionService connectionService, final ConnectorBuilderService connectorBuilderService, final DestinationService destinationService, - final OAuthService oauthService, final OperationService operationService, final SourceService sourceService, final WorkspaceService workspaceService) { @@ -102,7 +100,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin connectionService, connectorBuilderService, destinationService, - oauthService, operationService, sourceService, workspaceService); diff --git a/airbyte-oauth/build.gradle.kts b/airbyte-oauth/build.gradle.kts index 965929c61c0..094042d7d4a 100644 --- a/airbyte-oauth/build.gradle.kts +++ b/airbyte-oauth/build.gradle.kts @@ -16,6 +16,7 @@ dependencies { implementation(project(":oss:airbyte-api:problems-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-config:config-models")) + implementation(project(":oss:airbyte-data")) implementation(project(":oss:airbyte-json-validation")) implementation(libs.airbyte.protocol) diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java index 9526d6762ef..562466b48a3 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/FacebookOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import 
io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.oauth.flows.OAuthFlowIntegrationTest; import io.airbyte.validation.json.JsonValidationException; @@ -40,7 +40,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new FacebookMarketingOAuthFlow(httpClient); } @@ -69,7 +69,7 @@ public void testFullFacebookOAuthFlow() throws InterruptedException, ConfigNotFo .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(param)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(param)); final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, param.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java index 8361ffc12cd..82f3805bb52 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GithubOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; 
import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -40,7 +40,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new GithubOAuthFlow(httpClient); } @@ -70,7 +70,7 @@ public void testFullGithubOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = flow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java index 572c7ce5b9d..84bb7880f62 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/GitlabOAuthFlowIntegrationTest.java @@ -12,8 +12,7 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -40,7 +39,7 @@ protected 
Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, HttpClient httpClient) { return new GitlabOAuthFlow(httpClient); } @@ -55,7 +54,7 @@ public void setup() throws IOException { } @Test - public void testFullGitlabOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { + public void testFullGitlabOAuthFlow() throws InterruptedException, IOException, JsonValidationException { int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); @@ -69,7 +68,7 @@ public void testFullGitlabOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = flow.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java index 85d4f53c2b9..4e6a46b4bbe 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/IntercomOAuthFlowIntegrationTest.java @@ -12,8 +12,7 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import 
io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -40,7 +39,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new IntercomOAuthFlow(httpClient); } @@ -56,7 +55,7 @@ public void setup() throws IOException { } @Test - public void testFullIntercomOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { + public void testFullIntercomOAuthFlow() throws InterruptedException, IOException, JsonValidationException { int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); @@ -72,7 +71,7 @@ public void testFullIntercomOAuthFlow() throws InterruptedException, ConfigNotFo .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = flow.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java index 
bcdc4dcc8ab..2e517ef39ed 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/LinkedinAdsOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -44,7 +44,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new LinkedinAdsOAuthFlow(httpClient); } @@ -64,9 +64,9 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = - getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, + getFlowImplementation(oauthService, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing diff --git 
a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java index cff7108fa66..f753cd56805 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/PipeDriveOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -45,7 +45,7 @@ protected int getServerListeningPort() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new PipeDriveOAuthFlow(httpClient); } @@ -63,8 +63,8 @@ public void testFullPipeDriveOAuthFlow() throws InterruptedException, ConfigNotF .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); - final String url = getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + final String url = getFlowImplementation(oauthService, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), 
Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); waitForResponse(20); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java index c2269d2a38b..6e81b1835dd 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/QuickbooksOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -44,7 +44,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new QuickbooksOAuthFlow(httpClient); } @@ -64,9 +64,9 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = - getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, 
Jsons.emptyObject(), null, + getFlowImplementation(oauthService, httpClient).getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java index dec2227c906..9b5b46426b9 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SalesforceOAuthFlowIntegrationTest.java @@ -17,7 +17,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.io.OutputStream; @@ -44,11 +44,11 @@ public class SalesforceOAuthFlowIntegrationTest { private static final String REDIRECT_URL = "http://localhost:8000/code"; private static final Path CREDENTIALS_PATH = Path.of("secrets/salesforce.json"); - private ConfigRepository configRepository; private SalesforceOAuthFlow salesforceOAuthFlow; private HttpServer server; private ServerHandler serverHandler; private HttpClient httpClient; + private OAuthService oAuthService; @BeforeEach public void setup() throws IOException { @@ -56,7 +56,7 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); httpClient = 
HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); salesforceOAuthFlow = new SalesforceOAuthFlow(httpClient); @@ -88,7 +88,7 @@ public void testFullSalesforceOAuthFlow() throws InterruptedException, ConfigNot .put("client_id", clientId) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = salesforceOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java index d17f59211b3..10738354ee6 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SlackOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -40,7 +40,7 @@ protected String getRedirectUrl() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new 
SlackOAuthFlow(httpClient); } @@ -59,8 +59,8 @@ public void testFullSlackOAuthFlow() throws InterruptedException, ConfigNotFound .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); - final String url = getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + final String url = getFlowImplementation(oauthService, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); // TODO: To automate, start a selenium job to navigate to the Consent URL and click on allowing diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java index 79ed80e1161..a8e4fc2a059 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SnapchatMarketingOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -45,7 +45,7 @@ protected int getServerListeningPort() { } @Override - protected 
OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new SnapchatMarketingOAuthFlow(httpClient); } @@ -63,8 +63,8 @@ public void testFullSnapchatMarketingOAuthFlow() throws InterruptedException, Co .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); - final String url = getFlowImplementation(configRepository, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + final String url = getFlowImplementation(oauthService, httpClient).getSourceConsentUrl(workspaceId, definitionId, getRedirectUrl(), Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); waitForResponse(20); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java index a9b0dcb308f..71bde8c574f 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SquareOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import 
io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -40,7 +40,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new SquareOAuthFlow(httpClient); } @@ -72,7 +72,7 @@ public void testFullSquareOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = flow.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java index 015cc2178b9..391805c4358 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/SurveymonkeyOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -39,7 +39,7 @@ protected Path 
getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new SurveymonkeyOAuthFlow(httpClient); } @@ -68,7 +68,7 @@ public void testFullSurveymonkeyOAuthFlow() throws InterruptedException, ConfigN .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = flow.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java index 23d0cef8432..9a8fd4224d6 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TrelloOAuthFlowIntegrationTest.java @@ -17,7 +17,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.io.OutputStream; @@ -43,10 +43,10 @@ public class TrelloOAuthFlowIntegrationTest { private static final String REDIRECT_URL = "http://localhost:8000/code"; 
private static final Path CREDENTIALS_PATH = Path.of("secrets/trello.json"); - private ConfigRepository configRepository; private TrelloOAuthFlow trelloOAuthFlow; private HttpServer server; private ServerHandler serverHandler; + private OAuthService oAuthService; @BeforeEach public void setup() throws IOException { @@ -54,7 +54,7 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); trelloOAuthFlow = new TrelloOAuthFlow(); server = HttpServer.create(new InetSocketAddress(8000), 0); @@ -85,7 +85,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", clientId) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = trelloOAuthFlow.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TypeformOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TypeformOAuthFlowIntegrationTest.java index c0f03686885..2c80ce6cb05 100644 --- a/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TypeformOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io.airbyte.oauth.flows/TypeformOAuthFlowIntegrationTest.java @@ -12,8 +12,7 @@ import com.google.common.collect.ImmutableMap; import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; 
-import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -40,7 +39,7 @@ protected Path getCredentialsPath() { } @Override - protected OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new TypeformOAuthFlow(httpClient); } @@ -55,7 +54,7 @@ public void setup() throws IOException { } @Test - public void testFullTypeformOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { + public void testFullTypeformOAuthFlow() throws InterruptedException, IOException, JsonValidationException { int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); @@ -69,7 +68,7 @@ public void testFullTypeformOAuthFlow() throws InterruptedException, ConfigNotFo .put("client_id", credentialsJson.get("client_id").asText()) .put("client_secret", credentialsJson.get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = flow.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java index ceb5a90b80f..f7bf18dfbe2 100644 --- 
a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/HubspotOAuthFlowIntegrationTest.java @@ -13,7 +13,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; @@ -39,7 +39,7 @@ protected OAuthFlowImplementation getFlowObject() { } @Override - protected OAuthFlowImplementation getFlowImplementation(final ConfigRepository configRepository, final HttpClient httpClient) { + protected OAuthFlowImplementation getFlowImplementation(final OAuthService oauthService, final HttpClient httpClient) { return new HubspotOAuthFlow(httpClient); } @@ -58,8 +58,8 @@ public void testFullOAuthFlow() throws InterruptedException, ConfigNotFoundExcep .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); - final var flowObject = getFlowImplementation(configRepository, httpClient); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + final var flowObject = getFlowImplementation(oauthService, httpClient); final String url = flowObject.getSourceConsentUrl( workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java 
b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java index 37ae5160bdd..730ce32e356 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/OAuthFlowIntegrationTest.java @@ -4,12 +4,10 @@ package io.airbyte.oauth.flows; -import static org.mockito.Mockito.mock; - import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; import com.sun.net.httpserver.HttpServer; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.OAuthFlowImplementation; import java.io.IOException; import java.io.OutputStream; @@ -39,7 +37,7 @@ public abstract class OAuthFlowIntegrationTest { protected static final int SERVER_LISTENING_PORT = 80; protected HttpClient httpClient; - protected ConfigRepository configRepository; + protected OAuthService oauthService; protected OAuthFlowImplementation flow; protected HttpServer server; protected ServerHandler serverHandler; @@ -52,7 +50,7 @@ protected String getRedirectUrl() { return REDIRECT_URL; } - protected abstract OAuthFlowImplementation getFlowImplementation(ConfigRepository configRepository, HttpClient httpClient); + protected abstract OAuthFlowImplementation getFlowImplementation(OAuthService oauthService, HttpClient httpClient); @BeforeEach public void setup() throws IOException { @@ -60,9 +58,8 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); - flow = this.getFlowImplementation(configRepository, httpClient); + flow = this.getFlowImplementation(oauthService, httpClient); server = HttpServer.create(new InetSocketAddress(getServerListeningPort()), 0); server.setExecutor(null); 
// creates a default executor diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java index 8ddfc651dec..ca77e6e6d95 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAdsOAuthFlowIntegrationTest.java @@ -16,8 +16,7 @@ import com.sun.net.httpserver.HttpServer; import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.io.OutputStream; @@ -44,11 +43,11 @@ public class GoogleAdsOAuthFlowIntegrationTest { private static final String REDIRECT_URL = "http://localhost/code"; private static final Path CREDENTIALS_PATH = Path.of("secrets/google_ads.json"); - private ConfigRepository configRepository; private GoogleAdsOAuthFlow googleAdsOAuthFlow; private HttpServer server; private ServerHandler serverHandler; private HttpClient httpClient; + private OAuthService oAuthService; @BeforeEach public void setup() throws IOException { @@ -56,7 +55,7 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); googleAdsOAuthFlow = new GoogleAdsOAuthFlow(httpClient); @@ -73,7 +72,7 @@ void tearDown() { } @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, 
JsonValidationException { + public void testFullGoogleOAuthFlow() throws InterruptedException, IOException, JsonValidationException { int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); @@ -87,7 +86,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = googleAdsOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java index b87c1a458a7..5f3255cec4f 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleAnalyticsOAuthFlowIntegrationTest.java @@ -17,7 +17,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.io.OutputStream; @@ -44,11 +44,11 @@ public class GoogleAnalyticsOAuthFlowIntegrationTest { private static final String REDIRECT_URL = 
"http://localhost/code"; private static final Path CREDENTIALS_PATH = Path.of("secrets/google_analytics.json"); - private ConfigRepository configRepository; private GoogleAnalyticsViewIdOAuthFlow googleAnalyticsViewIdOAuthFlow; private HttpServer server; private ServerHandler serverHandler; private HttpClient httpClient; + private OAuthService oAuthService; @BeforeEach public void setup() throws IOException { @@ -56,7 +56,7 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); googleAnalyticsViewIdOAuthFlow = new GoogleAnalyticsViewIdOAuthFlow(httpClient); @@ -87,7 +87,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = googleAnalyticsViewIdOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java index cd8439d10ed..a1ecc52ed88 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java +++ 
b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSearchConsoleOAuthFlowIntegrationTest.java @@ -16,8 +16,7 @@ import com.sun.net.httpserver.HttpServer; import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; -import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.io.OutputStream; @@ -44,11 +43,11 @@ public class GoogleSearchConsoleOAuthFlowIntegrationTest { private static final String REDIRECT_URL = "http://localhost/code"; private static final Path CREDENTIALS_PATH = Path.of("secrets/google_search_console.json"); - private ConfigRepository configRepository; private GoogleSearchConsoleOAuthFlow googleSearchConsoleOAuthFlow; private HttpServer server; private ServerHandler serverHandler; private HttpClient httpClient; + private OAuthService oAuthService; @BeforeEach public void setup() throws IOException { @@ -56,7 +55,7 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); googleSearchConsoleOAuthFlow = new GoogleSearchConsoleOAuthFlow(httpClient); @@ -73,7 +72,7 @@ void tearDown() { } @Test - public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoundException, IOException, JsonValidationException { + public void testFullGoogleOAuthFlow() throws InterruptedException, IOException, JsonValidationException { int limit = 20; final UUID workspaceId = UUID.randomUUID(); final UUID definitionId = UUID.randomUUID(); @@ -87,7 +86,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", 
credentialsJson.get("authorization").get("client_id").asText()) .put("client_secret", credentialsJson.get("authorization").get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = googleSearchConsoleOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java index 6e1f5e71d3e..99ff98a416f 100644 --- a/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java +++ b/airbyte-oauth/src/test-integration/java/io/airbyte/oauth/flows/google/GoogleSheetsOAuthFlowIntegrationTest.java @@ -17,7 +17,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.validation.json.JsonValidationException; import java.io.IOException; import java.io.OutputStream; @@ -44,11 +44,11 @@ public class GoogleSheetsOAuthFlowIntegrationTest { private static final String REDIRECT_URL = "http://localhost/code"; private static final Path CREDENTIALS_PATH = Path.of("secrets/google_sheets.json"); - private ConfigRepository configRepository; private GoogleSheetsOAuthFlow googleSheetsOAuthFlow; private HttpServer server; private ServerHandler serverHandler; private HttpClient httpClient; + private OAuthService oAuthService; @BeforeEach public 
void setup() throws IOException { @@ -56,7 +56,7 @@ public void setup() throws IOException { throw new IllegalStateException( "Must provide path to a oauth credentials file."); } - configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); httpClient = HttpClient.newBuilder().version(HttpClient.Version.HTTP_1_1).build(); googleSheetsOAuthFlow = new GoogleSheetsOAuthFlow(httpClient); @@ -87,7 +87,7 @@ public void testFullGoogleOAuthFlow() throws InterruptedException, ConfigNotFoun .put("client_id", credentialsJson.get("credentials").get("client_id").asText()) .put("client_secret", credentialsJson.get("credentials").get("client_secret").asText()) .build()))); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); final String url = googleSheetsOAuthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, Jsons.emptyObject(), null, sourceOAuthParameter.getConfiguration()); LOGGER.info("Waiting for user consent at: {}", url); diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java index c5a81b1f216..b48d2046ab3 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/BaseOAuthFlowTest.java @@ -19,6 +19,7 @@ import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.BaseOAuthFlow; import io.airbyte.oauth.MoreOAuthParameters; import io.airbyte.protocol.models.OAuthConfigSpecification; @@ -45,6 +46,7 @@ public abstract class BaseOAuthFlowTest { private HttpClient httpClient; private 
ConfigRepository configRepository; + private OAuthService oAuthService; private BaseOAuthFlow oauthFlow; private UUID workspaceId; @@ -64,6 +66,7 @@ protected ConfigRepository getConfigRepository() { void setup() throws JsonValidationException, IOException { httpClient = mock(HttpClient.class); configRepository = mock(ConfigRepository.class); + oAuthService = mock(OAuthService.class); oauthFlow = getOAuthFlow(); workspaceId = UUID.randomUUID(); @@ -72,12 +75,12 @@ void setup() throws JsonValidationException, IOException { .withOauthParameterId(UUID.randomUUID()) .withSourceDefinitionId(definitionId) .withConfiguration(getOAuthParamConfig()); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); destinationOAuthParameter = new DestinationOAuthParameter() .withOauthParameterId(UUID.randomUUID()) .withDestinationDefinitionId(definitionId) .withConfiguration(getOAuthParamConfig()); - when(configRepository.getDestinationOAuthParameterOptional(any(), any())).thenReturn(Optional.of(destinationOAuthParameter)); + when(oAuthService.getDestinationOAuthParameterOptional(any(), any())).thenReturn(Optional.of(destinationOAuthParameter)); } /** @@ -244,8 +247,8 @@ void testValidateInputOAuthConfigurationFailure() { @Test void testGetConsentUrlEmptyOAuthParameters() throws JsonValidationException, IOException { - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.empty()); - when(configRepository.getDestinationOAuthParameterOptional(any(), any())).thenReturn(Optional.empty()); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.empty()); + when(oAuthService.getDestinationOAuthParameterOptional(any(), any())).thenReturn(Optional.empty()); assertThrows(ResourceNotFoundProblem.class, () -> oauthFlow.getSourceConsentUrl(workspaceId, 
definitionId, REDIRECT_URL, getInputOAuthConfiguration(), getoAuthConfigSpecification(), null)); @@ -260,12 +263,12 @@ void testGetConsentUrlIncompleteOAuthParameters() throws IOException, JsonValida .withOauthParameterId(UUID.randomUUID()) .withSourceDefinitionId(definitionId) .withConfiguration(Jsons.emptyObject()); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); DestinationOAuthParameter destinationOAuthParameter = new DestinationOAuthParameter() .withOauthParameterId(UUID.randomUUID()) .withDestinationDefinitionId(definitionId) .withConfiguration(Jsons.emptyObject()); - when(configRepository.getDestinationOAuthParameterOptional(any(), any())).thenReturn(Optional.of(destinationOAuthParameter)); + when(oAuthService.getDestinationOAuthParameterOptional(any(), any())).thenReturn(Optional.of(destinationOAuthParameter)); assertThrows(IllegalArgumentException.class, () -> oauthFlow.getSourceConsentUrl(workspaceId, definitionId, REDIRECT_URL, getInputOAuthConfiguration(), getoAuthConfigSpecification(), sourceOAuthParameter.getConfiguration())); diff --git a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java index f6315ae07df..4027e8e8bbc 100644 --- a/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java +++ b/airbyte-oauth/src/test/java/io/airbyte/oauth/flows/TrelloOAuthFlowTest.java @@ -19,7 +19,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.config.SourceOAuthParameter; import io.airbyte.config.persistence.ConfigNotFoundException; -import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.MoreOAuthParameters; import io.airbyte.validation.json.JsonValidationException; import 
java.io.IOException; @@ -35,10 +35,10 @@ class TrelloOAuthFlowTest { private UUID workspaceId; private UUID definitionId; - private ConfigRepository configRepository; private TrelloOAuthFlow trelloOAuthFlow; private HttpTransport transport; private SourceOAuthParameter sourceOAuthParameter; + private OAuthService oauthService; @BeforeEach void setup() throws IOException, JsonValidationException { @@ -64,14 +64,14 @@ public LowLevelHttpResponse execute() throws IOException { } }; - configRepository = mock(ConfigRepository.class); + oauthService = mock(OAuthService.class); sourceOAuthParameter = new SourceOAuthParameter() .withSourceDefinitionId(definitionId) .withConfiguration(Jsons.jsonNode(ImmutableMap.builder() .put("client_id", "test_client_id") .put("client_secret", "test_client_secret") .build())); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); + when(oauthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of(sourceOAuthParameter)); trelloOAuthFlow = new TrelloOAuthFlow(transport); } diff --git a/airbyte-persistence/job-persistence/build.gradle.kts b/airbyte-persistence/job-persistence/build.gradle.kts index 8ee530a72dc..b0444ec21a3 100644 --- a/airbyte-persistence/job-persistence/build.gradle.kts +++ b/airbyte-persistence/job-persistence/build.gradle.kts @@ -26,6 +26,7 @@ dependencies { implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-db:jooq")) implementation(project(":oss:airbyte-db:db-lib")) + implementation(project(":oss:airbyte-data")) implementation(libs.airbyte.protocol) implementation(project(":oss:airbyte-config:config-persistence")) implementation(project(":oss:airbyte-featureflag")) diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java 
b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java index 3b5d9817a2f..0654b940929 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/factory/OAuthConfigSupplier.java @@ -19,6 +19,7 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.MoreOAuthParameters; import io.airbyte.persistence.job.tracker.TrackingMetadata; import io.airbyte.protocol.models.ConnectorSpecification; @@ -47,13 +48,16 @@ public class OAuthConfigSupplier { private final ConfigRepository configRepository; private final TrackingClient trackingClient; private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private final OAuthService oAuthService; public OAuthConfigSupplier(final ConfigRepository configRepository, final TrackingClient trackingClient, - final ActorDefinitionVersionHelper actorDefinitionVersionHelper) { + final ActorDefinitionVersionHelper actorDefinitionVersionHelper, + final OAuthService oauthService) { this.configRepository = configRepository; this.trackingClient = trackingClient; this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; + this.oAuthService = oauthService; } /** @@ -83,7 +87,7 @@ public JsonNode maskSourceOAuthParameters(final UUID sourceDefinitionId, throws IOException { try { final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); - configRepository.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId) + oAuthService.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId) .ifPresent(sourceOAuthParameter -> 
maskOauthParameters(sourceDefinition.getName(), sourceConnectorSpec, sourceConnectorConfig)); return sourceConnectorConfig; } catch (final JsonValidationException | ConfigNotFoundException e) { @@ -108,7 +112,7 @@ public JsonNode maskDestinationOAuthParameters(final UUID destinationDefinitionI throws IOException { try { final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); - configRepository.getDestinationOAuthParameterOptional(workspaceId, destinationDefinitionId) + oAuthService.getDestinationOAuthParameterOptional(workspaceId, destinationDefinitionId) .ifPresent(destinationOAuthParameter -> maskOauthParameters(destinationDefinition.getName(), destinationConnectorSpec, destinationConnectorConfig)); return destinationConnectorConfig; @@ -135,7 +139,7 @@ public JsonNode injectSourceOAuthParameters(final UUID sourceDefinitionId, try { final StandardSourceDefinition sourceDefinition = configRepository.getStandardSourceDefinition(sourceDefinitionId); final ActorDefinitionVersion sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, workspaceId, sourceId); - configRepository.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId) + oAuthService.getSourceOAuthParameterOptional(workspaceId, sourceDefinitionId) .ifPresent(sourceOAuthParameter -> { if (injectOAuthParameters(sourceDefinition.getName(), sourceVersion.getSpec(), sourceOAuthParameter.getConfiguration(), sourceConnectorConfig)) { @@ -168,7 +172,7 @@ public JsonNode injectDestinationOAuthParameters(final UUID destinationDefinitio final StandardDestinationDefinition destinationDefinition = configRepository.getStandardDestinationDefinition(destinationDefinitionId); final ActorDefinitionVersion destinationVersion = actorDefinitionVersionHelper.getDestinationVersion(destinationDefinition, workspaceId, destinationId); - configRepository.getDestinationOAuthParameterOptional(workspaceId, 
destinationDefinitionId) + oAuthService.getDestinationOAuthParameterOptional(workspaceId, destinationDefinitionId) .ifPresent(destinationOAuthParameter -> { if (injectOAuthParameters(destinationDefinition.getName(), destinationVersion.getSpec(), destinationOAuthParameter.getConfiguration(), destinationConnectorConfig)) { diff --git a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java index a7cf59fa8c9..2f8eefe8df8 100644 --- a/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java +++ b/airbyte-persistence/job-persistence/src/test/java/io/airbyte/persistence/job/factory/OAuthConfigSupplierTest.java @@ -24,6 +24,7 @@ import io.airbyte.config.persistence.ActorDefinitionVersionHelper; import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; +import io.airbyte.data.services.OAuthService; import io.airbyte.oauth.MoreOAuthParameters; import io.airbyte.protocol.models.AdvancedAuth; import io.airbyte.protocol.models.AdvancedAuth.AuthFlowType; @@ -63,13 +64,15 @@ class OAuthConfigSupplierTest { private ActorDefinitionVersion testSourceVersion; private ConnectorSpecification testConnectorSpecification; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; + private OAuthService oAuthService; @BeforeEach void setup() throws JsonValidationException, ConfigNotFoundException, IOException { configRepository = mock(ConfigRepository.class); trackingClient = mock(TrackingClient.class); actorDefinitionVersionHelper = mock(ActorDefinitionVersionHelper.class); - oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, trackingClient, actorDefinitionVersionHelper); + oAuthService = mock(OAuthService.class); + oAuthConfigSupplier = new OAuthConfigSupplier(configRepository, 
trackingClient, actorDefinitionVersionHelper, oAuthService); sourceDefinitionId = UUID.randomUUID(); testSourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(sourceDefinitionId) @@ -265,7 +268,7 @@ void testOAuthInjectionScopedToWorkspace() throws JsonValidationException, IOExc final UUID workspaceId = UUID.randomUUID(); final UUID sourceId = UUID.randomUUID(); final Map oauthParameters = generateOAuthParameters(); - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of( + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of( new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) .withSourceDefinitionId(sourceDefinitionId) @@ -360,7 +363,7 @@ private void setupStandardDefinitionMock(final AdvancedAuth advancedAuth) throws } private void setupOAuthParamMocks(final Map oauthParameters) throws JsonValidationException, IOException { - when(configRepository.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of( + when(oAuthService.getSourceOAuthParameterOptional(any(), any())).thenReturn(Optional.of( new SourceOAuthParameter() .withOauthParameterId(UUID.randomUUID()) .withSourceDefinitionId(sourceDefinitionId) diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java index cb3e252e053..067c949b610 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java @@ -16,7 +16,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.ConnectorBuilderService; import io.airbyte.data.services.DestinationService; -import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.OperationService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; 
@@ -101,7 +100,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin final ConnectionService connectionService, final ConnectorBuilderService connectorBuilderService, final DestinationService destinationService, - final OAuthService oauthService, final OperationService operationService, final SourceService sourceService, final WorkspaceService workspaceService) { @@ -111,7 +109,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin connectionService, connectorBuilderService, destinationService, - oauthService, operationService, sourceService, workspaceService); diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java index bb3d9a7accc..ca731f160b1 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/TemporalBeanFactory.java @@ -15,6 +15,7 @@ import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; +import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.persistence.job.errorreporter.JobErrorReporter; @@ -33,8 +34,9 @@ public class TemporalBeanFactory { @Singleton public OAuthConfigSupplier oAuthConfigSupplier(final ConfigRepository configRepository, final TrackingClient trackingClient, - final ActorDefinitionVersionHelper actorDefinitionVersionHelper) { - return new OAuthConfigSupplier(configRepository, trackingClient, actorDefinitionVersionHelper); + final ActorDefinitionVersionHelper actorDefinitionVersionHelper, + final OAuthService oAuthService) { + return new OAuthConfigSupplier(configRepository, trackingClient, actorDefinitionVersionHelper, oAuthService); } @Singleton From 
fcb361ead1c2a4ba9b328e6696ac416f2c56dbb2 Mon Sep 17 00:00:00 2001 From: Alex Buchanan Date: Mon, 30 Sep 2024 11:20:29 -0700 Subject: [PATCH 19/36] refactor: make helm tests work more easily (#14182) --- charts/helm-tests/Makefile | 4 +- charts/helm-tests/README.md | 19 +- charts/helm-tests/chart_path.go | 28 + charts/helm-tests/go.mod | 2 +- .../basic_install_test.go | 26 +- charts/helm-tests/integration_tests/init.go | 5 + .../cluster => integration_tests}/kind.go | 2 +- .../cluster => integration_tests}/provider.go | 2 +- charts/helm-tests/tests/app_utils_test.go | 28 + .../helm-tests/tests/basic_template_test.go | 319 ++++-------- .../helm-tests/tests/database_config_test.go | 113 +---- .../tests/enterprise_config_test.go | 76 +-- charts/helm-tests/tests/helm_opts.go | 52 ++ charts/helm-tests/tests/init.go | 20 - charts/helm-tests/tests/init_test.go | 7 + charts/helm-tests/tests/k8s_utils.go | 477 ------------------ charts/helm-tests/tests/k8s_utils_test.go | 306 +++++++++++ .../helm-tests/tests/storage_config_test.go | 206 +++----- charts/helm-tests/tests/topology_test.go | 354 +++---------- charts/helm-tests/tests/utils.go | 40 -- 20 files changed, 768 insertions(+), 1318 deletions(-) create mode 100644 charts/helm-tests/chart_path.go rename charts/helm-tests/{tests => integration_tests}/basic_install_test.go (86%) create mode 100644 charts/helm-tests/integration_tests/init.go rename charts/helm-tests/{pkg/cluster => integration_tests}/kind.go (98%) rename charts/helm-tests/{pkg/cluster => integration_tests}/provider.go (89%) create mode 100644 charts/helm-tests/tests/app_utils_test.go create mode 100644 charts/helm-tests/tests/helm_opts.go delete mode 100644 charts/helm-tests/tests/init.go create mode 100644 charts/helm-tests/tests/init_test.go delete mode 100644 charts/helm-tests/tests/k8s_utils.go create mode 100644 charts/helm-tests/tests/k8s_utils_test.go delete mode 100644 charts/helm-tests/tests/utils.go diff --git a/charts/helm-tests/Makefile 
b/charts/helm-tests/Makefile index eed05708e15..bcd7f7ea32b 100644 --- a/charts/helm-tests/Makefile +++ b/charts/helm-tests/Makefile @@ -1,10 +1,10 @@ .PHONY: test.unit: - go test ./tests -tags=template -v + go test ./tests/... -v -count=1 -timeout 0s .PHONY: test.integration: - go test ./tests -tags=install -v + go test ./integration_tests/... -v -count=1 -timeout 0s .PHONY: test.clean: diff --git a/charts/helm-tests/README.md b/charts/helm-tests/README.md index 3bbd3788d3f..22172fbd1a7 100644 --- a/charts/helm-tests/README.md +++ b/charts/helm-tests/README.md @@ -7,11 +7,18 @@ Utilities for testing and generating Helm charts. The tests in this repository are meant to be run against the Airbyte Helm Chart (OSS). ``` -export HELM_CHART_PATH= -go test ./tests -tags=template +go test -timeout=0s -v -count=1 ./tests ``` -There are a few different tags for the test: -* `template` - these are template tests which render the Helm templates and verify the yaml that will be submitted to Kubernetes -* `install` - these will spin up a local K8s clutser (Kind) and test installing the chart -* `storage_config` - these tests verify the various options for storage configuration (i.e. logs, state) +The `-count=1` is important to avoid Go's test caching, which doesn't work with our external helm files. + +If you're using VSCode, you might want to add the following to settings.json: +``` + "go.testTimeout": "0s", + "go.testFlags": [ + "-count=1" + ] +``` + +The `./tests` directory contains tests that render the chart and verify the output. +The `./integration_tests` directory contains tests that run a k8s cluster and actually install the chart in the cluster. 
\ No newline at end of file diff --git a/charts/helm-tests/chart_path.go b/charts/helm-tests/chart_path.go new file mode 100644 index 00000000000..68947c30b93 --- /dev/null +++ b/charts/helm-tests/chart_path.go @@ -0,0 +1,28 @@ +package helmtests + +import ( + "fmt" + "os" + "path/filepath" + "strings" +) + +func DetermineChartPath() string { + + if p := os.Getenv("HELM_CHART_PATH"); p != "" { + return p + } + + // Try to guess the chart path from the current working directory. + if cwd, err := os.Getwd(); err == nil { + cwd = filepath.ToSlash(cwd) + idx := strings.Index(cwd, "oss/charts") + if idx != -1 { + return cwd[:idx+10] + "/airbyte" + } + } + + fmt.Fprintf(os.Stderr, "error: couldn't automatically determine chart path. try setting HELM_CHART_PATH environment variable") + os.Exit(1) + return "" +} diff --git a/charts/helm-tests/go.mod b/charts/helm-tests/go.mod index e2e0af20315..b670f9b45a0 100644 --- a/charts/helm-tests/go.mod +++ b/charts/helm-tests/go.mod @@ -1,6 +1,6 @@ module github.com/airbytehq/airbyte-platform-internal/oss/charts/helm-tests -go 1.22.1 +go 1.23.1 require ( github.com/gruntwork-io/terratest v0.46.14 diff --git a/charts/helm-tests/tests/basic_install_test.go b/charts/helm-tests/integration_tests/basic_install_test.go similarity index 86% rename from charts/helm-tests/tests/basic_install_test.go rename to charts/helm-tests/integration_tests/basic_install_test.go index 9fd9f541b0b..2db6dd8f841 100644 --- a/charts/helm-tests/tests/basic_install_test.go +++ b/charts/helm-tests/integration_tests/basic_install_test.go @@ -1,13 +1,11 @@ -//go:build install - -package test +package integration import ( "context" "path/filepath" "testing" - "github.com/airbytehq/airbyte-platform-internal/oss/charts/helm-tests/pkg/cluster" + "github.com/airbytehq/airbyte-platform-internal/oss/charts/helm-tests/tests" "github.com/gruntwork-io/terratest/modules/helm" "github.com/gruntwork-io/terratest/modules/k8s" "github.com/gruntwork-io/terratest/modules/logger" 
@@ -17,7 +15,7 @@ import ( ) func TestBasicInstallWithDefaultValues(t *testing.T) { - cls := cluster.NewKindCluster() + cls := NewKindCluster() err := cls.Provision() require.NoError(t, err, "failure provisioning KIND cluster") defer cls.Deprovision() @@ -49,7 +47,7 @@ func TestBasicInstallWithDefaultValues(t *testing.T) { } func TestBasicEnterpriseInstallWithDefaultValues(t *testing.T) { - cls := cluster.NewKindCluster() + cls := NewKindCluster() err := cls.Provision() require.NoError(t, err, "failure provisioning KIND cluster") defer cls.Deprovision() @@ -70,7 +68,7 @@ func TestBasicEnterpriseInstallWithDefaultValues(t *testing.T) { require.NoError(t, err) t.Run("should fail to install if required values are missing", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterprise() + helmOpts := tests.BaseHelmOptionsForEnterprise() helmOpts.KubectlOptions = &k8s.KubectlOptions{ Namespace: releaseNamespace, } @@ -100,7 +98,7 @@ func TestBasicEnterpriseInstallWithDefaultValues(t *testing.T) { require.NoError(t, err) defer k8sClient.CoreV1().Secrets(releaseName).Delete(context.Background(), "airbyte-config-secrets", metav1.DeleteOptions{}) - helmOpts := baseHelmOptionsForEnterpriseWithValues() + helmOpts := tests.BaseHelmOptionsForEnterpriseWithValues() helmOpts.KubectlOptions = &k8s.KubectlOptions{ Namespace: releaseNamespace, } @@ -110,12 +108,16 @@ func TestBasicEnterpriseInstallWithDefaultValues(t *testing.T) { }) t.Run("should install successfully with airbyte.yml as a file", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithAirbyteYml() - helmOpts.KubectlOptions = &k8s.KubectlOptions{ + opts := tests.BaseHelmOptions() + opts.SetValues["global.edition"] = "enterprise" + opts.SetFiles = map[string]string{ + "global.airbyteYml": "../tests/fixtures/airbyte.yaml", + } + opts.KubectlOptions = &k8s.KubectlOptions{ Namespace: releaseNamespace, } - err = helm.InstallE(t, helmOpts, chartPath, releaseName) - defer helm.DeleteE(t, helmOpts, releaseName, 
true) + err = helm.InstallE(t, opts, chartPath, releaseName) + defer helm.DeleteE(t, opts, releaseName, true) require.NoError(t, err) }) } diff --git a/charts/helm-tests/integration_tests/init.go b/charts/helm-tests/integration_tests/init.go new file mode 100644 index 00000000000..20f2375c29d --- /dev/null +++ b/charts/helm-tests/integration_tests/init.go @@ -0,0 +1,5 @@ +package integration + +import helmtests "github.com/airbytehq/airbyte-platform-internal/oss/charts/helm-tests" + +var chartPath string = helmtests.DetermineChartPath() diff --git a/charts/helm-tests/pkg/cluster/kind.go b/charts/helm-tests/integration_tests/kind.go similarity index 98% rename from charts/helm-tests/pkg/cluster/kind.go rename to charts/helm-tests/integration_tests/kind.go index 1610b89b480..aa897561af2 100644 --- a/charts/helm-tests/pkg/cluster/kind.go +++ b/charts/helm-tests/integration_tests/kind.go @@ -1,4 +1,4 @@ -package cluster +package integration import ( "fmt" diff --git a/charts/helm-tests/pkg/cluster/provider.go b/charts/helm-tests/integration_tests/provider.go similarity index 89% rename from charts/helm-tests/pkg/cluster/provider.go rename to charts/helm-tests/integration_tests/provider.go index 521ad12350d..3987a09b077 100644 --- a/charts/helm-tests/pkg/cluster/provider.go +++ b/charts/helm-tests/integration_tests/provider.go @@ -1,4 +1,4 @@ -package cluster +package integration // ClusterProvider is an interface for implementing a cluster provider. 
type ClusterProvider interface { diff --git a/charts/helm-tests/tests/app_utils_test.go b/charts/helm-tests/tests/app_utils_test.go new file mode 100644 index 00000000000..836befad310 --- /dev/null +++ b/charts/helm-tests/tests/app_utils_test.go @@ -0,0 +1,28 @@ +package tests + +import ( + "fmt" + + "github.com/gruntwork-io/terratest/modules/helm" +) + +var allApps = []string{ + "server", + "webapp", + "connector-builder-server", + "worker", + "cron", + "keycloak", + "keycloak-setup", + "airbyte-bootloader", + "metrics", + "temporal", + "pod-sweeper", + "workload-api-server", + "workload-launcher", +} + +func setAppOpt(opts *helm.Options, appName, name, value string) { + key := fmt.Sprintf("%s.%s", appName, name) + opts.SetValues[key] = value +} diff --git a/charts/helm-tests/tests/basic_template_test.go b/charts/helm-tests/tests/basic_template_test.go index ad4df3b5c46..051d5d55028 100644 --- a/charts/helm-tests/tests/basic_template_test.go +++ b/charts/helm-tests/tests/basic_template_test.go @@ -1,19 +1,102 @@ -//go:build template - -package test +package tests import ( + "maps" + "slices" "testing" - "github.com/gruntwork-io/terratest/modules/helm" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) -// TODO: move this to a common package or file -// These are all of the common keys that we expect to see populated in the config map, regardless of the value of -// `global.edition`. 
-var commonConfigMapKeys = toStringSet( +func TestHelmTemplateWithDefaultValues(t *testing.T) { + + chartYaml := renderChart(t, BaseHelmOptions()) + envMap := getConfigMap(chartYaml, "airbyte-airbyte-env") + + t.Run("storage configs", func(t *testing.T) { + assert.Equal(t, envMap.Data["STORAGE_TYPE"], "minio") + assert.Equal(t, envMap.Data["STORAGE_BUCKET_LOG"], "airbyte-storage") + assert.Equal(t, envMap.Data["STORAGE_BUCKET_STATE"], "airbyte-storage") + assert.Equal(t, envMap.Data["MINIO_ENDPOINT"], "http://airbyte-minio-svc:9000") + assert.Equal(t, envMap.Data["S3_PATH_STYLE_ACCESS"], "true") + }) + + t.Run("metrics client", func(t *testing.T) { + assert.Empty(t, envMap.Data["METRIC_CLIENT"]) + assert.Empty(t, envMap.Data["OTEL_COLLECTOR_ENDPOINT"]) + }) + + t.Run("airbyte-env configmap", func(t *testing.T) { + // Make sure the env config map has all (and only) the expected keys. + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") + keys := slices.Collect(maps.Keys(configMap.Data)) + assert.ElementsMatch(t, keys, commonConfigMapKeys) + }) + + t.Run("airbyte-secrets secret", func(t *testing.T) { + // Make sure the secret has all (and only) the expected keys. + secret := getSecret(chartYaml, "airbyte-airbyte-secrets") + keys := slices.Collect(maps.Keys(secret.StringData)) + assert.ElementsMatch(t, keys, commonSecretkeys) + }) + + t.Run("the airbyte-airbyte-yml secret is not created by default", func(t *testing.T) { + // The airbyte-airbyte-yml secret is not created by default. + // The global.airbyteYml value must be set in order to render this resource. 
+ assertNoResource(t, chartYaml, "Secret", "airbyte-airbyte-yml") + }) + + t.Run("service account is created by default", func(t *testing.T) { + assert.NotNil(t, getServiceAccount(chartYaml, "airbyte-admin")) + assert.NotNil(t, getRole(chartYaml, "airbyte-admin-role")) + getRoleBinding(chartYaml, "airbyte-admin-binding") + }) +} + +func TestAirbyteYmlSecret(t *testing.T) { + // The airbyte-airbyte-yml secret is created when the global.airbyteYml value is set. + opts := BaseHelmOptions() + opts.SetFiles = map[string]string{ + "global.airbyteYml": "fixtures/airbyte.yaml", + } + chartYml := renderChart(t, opts) + secret := getSecret(chartYml, "airbyte-airbyte-yml") + assert.Equal(t, secret.Name, "airbyte-airbyte-yml") + assert.NotEmpty(t, secret.Data["fileContents"]) +} + +func TestEnterpriseConfigKeys(t *testing.T) { + opts := BaseHelmOptionsForEnterpriseWithValues() + chartYaml := renderChart(t, opts) + + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") + keys := slices.Collect(maps.Keys(configMap.Data)) + assert.ElementsMatch(t, keys, enterpriseEditionConfigMapKeys) + + secret := getSecret(chartYaml, "airbyte-airbyte-secrets") + keys = slices.Collect(maps.Keys(secret.StringData)) + assert.ElementsMatch(t, keys, enterpriseEditionSecretKeys) +} + +func TestProConfigKeys(t *testing.T) { + opts := BaseHelmOptions() + opts.SetValues["global.edition"] = "pro" + opts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" + opts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" + chartYaml := renderChart(t, opts) + + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") + keys := slices.Collect(maps.Keys(configMap.Data)) + assert.ElementsMatch(t, keys, enterpriseEditionConfigMapKeys) + + secret := getSecret(chartYaml, "airbyte-airbyte-secrets") + keys = slices.Collect(maps.Keys(secret.StringData)) + assert.ElementsMatch(t, keys, enterpriseEditionSecretKeys) +} + +// These are all of the common keys that we expect to see populated in the 
config map, +// regardless of the value of `global.edition`. +var commonConfigMapKeys = []string{ "ACTIVITY_INITIAL_DELAY_BETWEEN_ATTEMPTS_SECONDS", "ACTIVITY_MAX_ATTEMPT", "ACTIVITY_MAX_DELAY_BETWEEN_ATTEMPTS_SECONDS", @@ -85,233 +168,31 @@ var commonConfigMapKeys = toStringSet( "PUB_SUB_ENABLED", "PUB_SUB_TOPIC_NAME", "ENTERPRISE_SOURCE_STUBS_URL", -) +} -var proEditionConfigMapKeys = toStringSet( +var proEditionConfigMapKeys = append([]string{ "INITIAL_USER_FIRST_NAME", "INITIAL_USER_LAST_NAME", - "KEYCLOAK_INTERNAL_HOST", "KEYCLOAK_PORT", "KEYCLOAK_HOSTNAME_URL", "KEYCLOAK_JAVA_OPTS_APPEND", -) +}, commonConfigMapKeys...) // update these if they ever diverge from "pro" var enterpriseEditionConfigMapKeys = proEditionConfigMapKeys -var commonSecretkeys = toStringSet( +var commonSecretkeys = []string{ "DATABASE_USER", + "DATABASE_PASSWORD", "MINIO_ACCESS_KEY_ID", "MINIO_SECRET_ACCESS_KEY", "WORKLOAD_API_BEARER_TOKEN", -) +} -var proEditionSecretKeys = toStringSet( +var proEditionSecretKeys = append([]string{ "KEYCLOAK_ADMIN_USER", "KEYCLOAK_ADMIN_PASSWORD", -) +}, commonSecretkeys...) 
// update these if they ever diverge from "pro" var enterpriseEditionSecretKeys = proEditionSecretKeys - -func TestHelmTemplateWithDefaultValues(t *testing.T) { - - t.Run("basic template render", func(t *testing.T) { - helmOpts := baseHelmOptions() - _, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - require.NoError(t, err, "failure rendering template") - }) - - t.Run("verify airbyte-env configmap for edition", func(t *testing.T) { - cases := []struct { - edition string - expectedKeys set[string] - }{ - { - edition: "community", - expectedKeys: commonConfigMapKeys, - }, - { - edition: "enterprise", - expectedKeys: enterpriseEditionConfigMapKeys.union(commonConfigMapKeys), - }, - { - edition: "pro", - expectedKeys: proEditionConfigMapKeys.union(commonConfigMapKeys), - }, - } - - for _, c := range cases { - t.Run("edition="+c.edition, func(t *testing.T) { - var helmOpts *helm.Options - switch c.edition { - case "community": - helmOpts = baseHelmOptions() - case "pro", "enterprise": - helmOpts = baseHelmOptionsForEnterpriseWithAirbyteYml() - } - - configMapYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/env-configmap.yaml"}) - require.NoError(t, err, "failure rendering template") - - configMap, err := getConfigMap(configMapYaml, "airbyte-airbyte-env") - assert.NotNil(t, configMap) - require.NoError(t, err) - - // verify keys that we find the expected keys - for _, key := range c.expectedKeys.keys() { - _, ok := configMap.Data[key] - assert.True(t, ok, "expected key %s in ConfigMap for edition %s", key, c.edition) - } - - // verify that we don't find any unexpected keys - for key := range configMap.Data { - assert.True(t, c.expectedKeys.contains(key), "%s is not an expected ConfigMap key for edition %s", key, c.edition) - } - }) - } - }) - - t.Run("verify airbyte-secrets secret for edition", func(t *testing.T) { - cases := []struct { - edition string - expectedKeys set[string] - }{ - { - edition: 
"community", - expectedKeys: commonSecretkeys, - }, - { - edition: "enterprise", - expectedKeys: commonSecretkeys.union(enterpriseEditionSecretKeys), - }, - { - edition: "pro", - expectedKeys: commonSecretkeys.union(proEditionSecretKeys), - }, - } - - for _, c := range cases { - t.Run("edition="+c.edition, func(t *testing.T) { - var helmOpts *helm.Options - switch c.edition { - case "community": - helmOpts = baseHelmOptions() - case "pro", "enterprise": - helmOpts = baseHelmOptionsForEnterpriseWithAirbyteYml() - } - - secretYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/secret.yaml"}) - require.NoError(t, err, "failure rendering template") - - secret, err := getSecret(secretYaml, "airbyte-airbyte-secrets") - assert.NotNil(t, secret) - require.NoError(t, err) - - for _, key := range c.expectedKeys.keys() { - _, ok := secret.StringData[key] - assert.True(t, ok, "expected key %s not found in secret", key) - } - }) - } - }) - - t.Run("verify airbyte-yml secret", func(t *testing.T) { - cases := []struct { - airbyteYamlFile string - shouldRender bool - }{ - { - airbyteYamlFile: "", - shouldRender: false, - }, - { - airbyteYamlFile: "fixtures/airbyte.yaml", - shouldRender: true, - }, - } - - for _, c := range cases { - t.Run("airbyteYaml="+c.airbyteYamlFile, func(t *testing.T) { - helmOpts := baseHelmOptions() - - if c.airbyteYamlFile == "" { - _, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/airbyte-yml-secret.yaml"}) - require.Error(t, err, "template should not render if empty") - return - } - - if c.airbyteYamlFile != "" { - helmOpts.SetFiles = map[string]string{ - "global.airbyteYml": c.airbyteYamlFile, - } - } - - secretYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/airbyte-yml-secret.yaml"}) - require.NoError(t, err, "failure rendering template") - - secret, err := getSecret(secretYaml, "airbyte-airbyte-yml") - assert.NotNil(t, secret) - 
require.NoError(t, err) - - if c.shouldRender { - assert.Equal(t, secret.Name, "airbyte-airbyte-yml") - assert.NotEmpty(t, secret.Data["fileContents"]) - } else { - assert.Empty(t, secret.Name) - } - }) - } - }) - - t.Run("default storage configs", func(t *testing.T) { - helmOpts := baseHelmOptions() - - configMapYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/env-configmap.yaml"}) - require.NoError(t, err, "failure rendering template") - - configMap, err := getConfigMap(configMapYaml, "airbyte-airbyte-env") - assert.NotNil(t, configMap) - require.NoError(t, err) - - // default should be in-cluster minio - assert.Equal(t, configMap.Data["STORAGE_TYPE"], "minio") - assert.Equal(t, configMap.Data["STORAGE_BUCKET_LOG"], "airbyte-storage") - assert.Equal(t, configMap.Data["STORAGE_BUCKET_STATE"], "airbyte-storage") - assert.Equal(t, configMap.Data["MINIO_ENDPOINT"], "http://airbyte-minio-svc:9000") - assert.Equal(t, configMap.Data["S3_PATH_STYLE_ACCESS"], "true") - }) - - t.Run("default metrics client", func(t *testing.T) { - helmOpts := baseHelmOptions() - - configMapYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/env-configmap.yaml"}) - require.NoError(t, err, "failure rendering template") - - configMap, err := getConfigMap(configMapYaml, "airbyte-airbyte-env") - assert.NotNil(t, configMap) - require.NoError(t, err) - - assert.Empty(t, configMap.Data["METRIC_CLIENT"]) - assert.Empty(t, configMap.Data["OTEL_COLLECTOR_ENDPOINT"]) - }) - - t.Run("service account is created by default", func(t *testing.T) { - helmOpts := baseHelmOptions() - - tmplYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/serviceaccount.yaml"}) - require.NoError(t, err, "failure rendering template") - - serviceAccount, err := getServiceAccount(tmplYaml, "airbyte-admin") - assert.NotNil(t, serviceAccount) - require.NoError(t, err) - - role, err := getRole(tmplYaml, 
"airbyte-admin-role") - assert.NotNil(t, role) - require.NoError(t, err) - - roleBinding, err := getRoleBinding(tmplYaml, "airbyte-admin-binding") - assert.NotNil(t, roleBinding) - require.NoError(t, err) - }) -} diff --git a/charts/helm-tests/tests/database_config_test.go b/charts/helm-tests/tests/database_config_test.go index 4d0f5cee9e1..ddd1f71101c 100644 --- a/charts/helm-tests/tests/database_config_test.go +++ b/charts/helm-tests/tests/database_config_test.go @@ -1,6 +1,4 @@ -//go:build template || database_config - -package test +package tests import ( "fmt" @@ -9,7 +7,7 @@ import ( "github.com/gruntwork-io/terratest/modules/helm" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" - v1 "k8s.io/api/core/v1" + corev1 "k8s.io/api/core/v1" ) // all of the airbyte services that require database config keys @@ -42,30 +40,6 @@ var databaseClients = []struct { "DATABASE_URL": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_URL"), }, }, - //{ - // kind: "Deployment", - // name: "airbyte-workload-api-server", - // expectedEnvVars: map[string]expectedEnvVar{ - // "DATABASE_HOST": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_HOST"), - // "DATABASE_PORT": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_PORT"), - // "DATABASE_DB": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_DB"), - // "DATABASE_USER": expectedSecretVar().RefName("airbyte-airbyte-secrets").RefKey("DATABASE_USER"), - // "DATABASE_PASSWORD": expectedSecretVar().RefName("airbyte-airbyte-secrets").RefKey("DATABASE_PASSWORD"), - // "DATABASE_URL": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_URL"), - // }, - //}, - // { - // kind: "Deployment", - // name: "airbyte-workload-launcher", - // expectedEnvVars: map[string]expectedEnvVar{ - // "DATABASE_HOST": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_HOST"), - // "DATABASE_PORT": 
expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_PORT"), - // "DATABASE_DB": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_DB"), - // "DATABASE_USER": expectedSecretVar().RefName("airbyte-airbyte-secrets").RefKey("DATABASE_USER"), - // "DATABASE_PASSWORD": expectedSecretVar().RefName("airbyte-airbyte-secrets").RefKey("DATABASE_PASSWORD"), - // "DATABASE_URL": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("DATABASE_URL"), - // }, - // }, { kind: "Deployment", name: "airbyte-cron", @@ -137,25 +111,14 @@ var databaseClients = []struct { func TestDefaultDatabaseConfiguration(t *testing.T) { t.Run("should default to using an in-cluster database", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() // enable all the things + helmOpts := BaseHelmOptionsForEnterpriseWithValues() // enable all the things helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" - // helmOpts.SetValues["workload-launcher.enabled"] = "true" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - require.NoError(t, err) - - dbStatefulset, err := getStatefulSet(chartYaml, "airbyte-db") - require.NotNil(t, dbStatefulset) - require.NoError(t, err) + chartYaml := renderChart(t, helmOpts) + getStatefulSet(chartYaml, "airbyte-db") t.Run("default database credentials", func(t *testing.T) { - secret, err := getSecret(chartYaml, "airbyte-airbyte-secrets") - require.NotNil(t, secret) - require.NoError(t, err) - - configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env") - require.NotNil(t, configMap) - require.NoError(t, err) + secret := getSecret(chartYaml, "airbyte-airbyte-secrets") + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") assert.Equal(t, "airbyte", secret.StringData["DATABASE_USER"]) assert.Equal(t, "airbyte", secret.StringData["DATABASE_PASSWORD"]) @@ -163,23 +126,15 @@ func TestDefaultDatabaseConfiguration(t 
*testing.T) { }) t.Run("override default database credentials", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() // enable all the things + helmOpts := BaseHelmOptionsForEnterpriseWithValues() // enable all the things helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" - // helmOpts.SetValues["workload-launcher.enabled"] = "true" helmOpts.SetValues["postgresql.postgresqlUsername"] = "override-user" helmOpts.SetValues["postgresql.postgresqlPassword"] = "override-pass" helmOpts.SetValues["postgresql.postgresqlDatabase"] = "override-db" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - require.NoError(t, err) - secret, err := getSecret(chartYaml, "airbyte-airbyte-secrets") - require.NotNil(t, secret) - require.NoError(t, err) - - configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env") - require.NotNil(t, configMap) - require.NoError(t, err) + chartYaml := renderChart(t, helmOpts) + secret := getSecret(chartYaml, "airbyte-airbyte-secrets") + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") assert.Equal(t, "override-user", secret.StringData["DATABASE_USER"]) assert.Equal(t, "override-pass", secret.StringData["DATABASE_PASSWORD"]) @@ -189,27 +144,19 @@ func TestDefaultDatabaseConfiguration(t *testing.T) { t.Run("database clients should include database config vars", func(t *testing.T) { for _, c := range databaseClients { t.Run(fmt.Sprintf("%s should include database config env vars", c.name), func(t *testing.T) { - envVars := make(map[string]v1.EnvVar) + envVars := make(map[string]corev1.EnvVar) switch c.kind { case "Pod": - pod, err := getPod(chartYaml, c.name) - assert.NotNil(t, pod) - assert.NoError(t, err) + pod := getPod(chartYaml, c.name) envVars = envVarMap(pod.Spec.Containers[0].Env) case "Job": - job, err := getJob(chartYaml, c.name) - assert.NotNil(t, job) - assert.NoError(t, err) + job := getJob(chartYaml, c.name) envVars = 
envVarMap(job.Spec.Template.Spec.Containers[0].Env) case "Deployment": - dep, err := getDeployment(chartYaml, c.name) - assert.NotNil(t, dep) - assert.NoError(t, err) + dep := getDeployment(chartYaml, c.name) envVars = envVarMap(dep.Spec.Template.Spec.Containers[0].Env) case "StatefulSet": - ss, err := getStatefulSet(chartYaml, c.name) - assert.NotNil(t, ss) - assert.NoError(t, err) + ss := getStatefulSet(chartYaml, c.name) envVars = envVarMap(ss.Spec.Template.Spec.Containers[0].Env) } @@ -242,10 +189,8 @@ func TestExternalDatabaseConfiguration(t *testing.T) { t.Run("should require `global.database.secretName` if any secret key ref is set", func(t *testing.T) { for _, f := range secretFields { t.Run(f, func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() // enable all the things + helmOpts := BaseHelmOptionsForEnterpriseWithValues() // enable all the things helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" - // helmOpts.SetValues["workload-launcher.enabled"] = "true" helmOpts.SetValues["postgresql.enabled"] = "false" helmOpts.SetValues["global.database.secretName"] = "" @@ -268,10 +213,8 @@ func TestExternalDatabaseConfiguration(t *testing.T) { t.Run("should require the plaintext value of a field if its secretKey ref is not set", func(t *testing.T) { for _, f := range fields { - helmOpts := baseHelmOptionsForEnterpriseWithValues() // enable all the things + helmOpts := BaseHelmOptionsForEnterpriseWithValues() // enable all the things helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" - // helmOpts.SetValues["workload-launcher.enabled"] = "true" helmOpts.SetValues["postgresql.enabled"] = "false" helmOpts.SetValues["global.database.secretName"] = "" @@ -289,7 +232,7 @@ func TestExternalDatabaseConfiguration(t *testing.T) { }) t.Run("should set the DATABASE_USER in the generated secret when plaintext value is provided", func(t 
*testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() + helmOpts := BaseHelmOptionsForEnterpriseWithValues() helmOpts.SetValues["postgresql.enabled"] = "false" helmOpts.SetValues["global.database.secretName"] = "database-secret" helmOpts.SetValues["global.database.host"] = "localhost" @@ -298,12 +241,8 @@ func TestExternalDatabaseConfiguration(t *testing.T) { helmOpts.SetValues["global.database.user"] = "octavia" helmOpts.SetValues["global.database.passwordSecretKey"] = "DATABASE_PASSWORD" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - require.NoError(t, err) - - configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env") - require.NotNil(t, configMap) - require.NoError(t, err) + chartYaml := renderChart(t, helmOpts) + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") assert.Equal(t, "octavia", configMap.Data["DATABASE_USER"]) _, ok := configMap.Data["DATABASE_PASSWORD"] @@ -311,7 +250,7 @@ func TestExternalDatabaseConfiguration(t *testing.T) { }) t.Run("should set the DATABASE_PASSWORD in the config map when plaintext value is provided", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() + helmOpts := BaseHelmOptionsForEnterpriseWithValues() helmOpts.SetValues["postgresql.enabled"] = "false" helmOpts.SetValues["global.database.secretName"] = "database-secret" helmOpts.SetValues["global.database.host"] = "localhost" @@ -320,12 +259,8 @@ func TestExternalDatabaseConfiguration(t *testing.T) { helmOpts.SetValues["global.database.userSecretKey"] = "DATABASE_USER" helmOpts.SetValues["global.database.password"] = "squidward" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - require.NoError(t, err) - - configMap, err := getConfigMap(chartYaml, "airbyte-airbyte-env") - require.NotNil(t, configMap) - require.NoError(t, err) + chartYaml := renderChart(t, helmOpts) + configMap := getConfigMap(chartYaml, "airbyte-airbyte-env") assert.Equal(t, "squidward", 
configMap.Data["DATABASE_PASSWORD"]) _, ok := configMap.Data["DATABASE_USER"] diff --git a/charts/helm-tests/tests/enterprise_config_test.go b/charts/helm-tests/tests/enterprise_config_test.go index 8d1e75b973e..c1199afae62 100644 --- a/charts/helm-tests/tests/enterprise_config_test.go +++ b/charts/helm-tests/tests/enterprise_config_test.go @@ -1,6 +1,4 @@ -//go:build template || enterprise - -package test +package tests import ( "fmt" @@ -13,7 +11,7 @@ import ( func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { t.Run("require global.enterprise.secretName", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" // set to empty string since it has a default value helmOpts.SetValues["global.enterprise.secretName"] = "" @@ -24,7 +22,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.enterprise.licenseKeySecretKey", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" // set to empty string since it has a default value helmOpts.SetValues["global.enterprise.licenseKeySecretKey"] = "" @@ -35,7 +33,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.instanceAdmin.secretName", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" // set to empty string since it has a default value helmOpts.SetValues["global.auth.instanceAdmin.secretName"] = "" @@ -46,7 +44,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.instanceAdmin.firstName", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = 
"Squidington" @@ -55,7 +53,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.instanceAdmin.lastName", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "" @@ -64,7 +62,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.instanceAdmin.emailSecretKey", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -75,7 +73,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.instanceAdmin.passwordSecretKey", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -86,7 +84,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.identityProvider.secretName when enabling SSO", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -96,7 +94,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.identityProvider.type when enabling SSO", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" 
helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -111,7 +109,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.identityProvider.oidc.domain when enabling SSO", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -126,7 +124,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.identityProvider.oidc.appName when enabling SSO", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -141,7 +139,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.identityProvider.oidc.clientIdSecretKey when enabling SSO", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -156,7 +154,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { }) t.Run("require global.auth.identityProvider.oidc.clientSecretSecretKey when enabling SSO", func(t *testing.T) { - helmOpts := baseHelmOptions() + helmOpts := BaseHelmOptions() helmOpts.SetValues["global.edition"] = "enterprise" helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" @@ -171,7 +169,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t 
*testing.T) { }) t.Run("should set enterprise config env vars", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() + helmOpts := BaseHelmOptionsForEnterpriseWithValues() helmOpts.SetValues["global.enterprise.secretName"] = "airbyte-license" helmOpts.SetValues["global.auth.instanceAdmin.secretName"] = "sso-secrets" helmOpts.SetValues["global.auth.identityProvider.secretName"] = "sso-secrets" @@ -181,9 +179,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { helmOpts.SetValues["global.auth.identityProvider.oidc.clientIdSecretKey"] = "client-id" helmOpts.SetValues["global.auth.identityProvider.oidc.clientSecretSecretKey"] = "client-secret" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - require.NotNil(t, chartYaml) - require.NoError(t, err) + chartYaml := renderChart(t, helmOpts) t.Run("should set required env vars for keycloak setup job", func(t *testing.T) { expectedEnvVarKeys := map[string]expectedEnvVar{ @@ -200,9 +196,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { } // Verify that the keycloak setup job has the correct vars - keycloakSetupJob, err := getJob(chartYaml, "airbyte-keycloak-setup") - assert.NotNil(t, keycloakSetupJob) - assert.NoError(t, err) + keycloakSetupJob := getJob(chartYaml, "airbyte-keycloak-setup") keycloakEnvVars := envVarMap(keycloakSetupJob.Spec.Template.Spec.Containers[0].Env) for k, expected := range expectedEnvVarKeys { @@ -228,9 +222,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { } // Verify that the airbyte server deployment has the correct vars - airbyteServerDep, err := getDeployment(chartYaml, "airbyte-server") - assert.NotNil(t, airbyteServerDep) - assert.NoError(t, err) + airbyteServerDep := getDeployment(chartYaml, "airbyte-server") airbyteServerEnvVars := envVarMap(airbyteServerDep.Spec.Template.Spec.Containers[0].Env) for k, expected := range expectedEnvVarKeys { @@ -245,9 +237,7 @@ func 
TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { "KEYCLOAK_DATABASE_URL": expectedConfigMapVar().RefName("airbyte-airbyte-env").RefKey("KEYCLOAK_DATABASE_URL"), } - keycloakSS, err := getStatefulSet(chartYaml, "airbyte-keycloak") - assert.NotNil(t, keycloakSS) - assert.NoError(t, err) + keycloakSS := getStatefulSet(chartYaml, "airbyte-keycloak") keycloakEnvVars := envVarMap(keycloakSS.Spec.Template.Spec.Containers[0].Env) for k, expected := range expectedEnvVarKeys { @@ -261,7 +251,7 @@ func TestBasicEnterpriseConfigWithHelmValues(t *testing.T) { func TestKeycloakInitContainerOverride(t *testing.T) { t.Run("default keycloak readiness image is curlimages/curl", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() + helmOpts := BaseHelmOptionsForEnterpriseWithValues() helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" helmOpts.SetValues["global.auth.identityProvider.secretName"] = "sso-secrets" @@ -270,16 +260,10 @@ func TestKeycloakInitContainerOverride(t *testing.T) { helmOpts.SetValues["global.auth.identityProvider.oidc.appName"] = "example-app" helmOpts.SetValues["global.auth.identityProvider.oidc.clientIdSecretKey"] = "client-id" helmOpts.SetValues["global.auth.identityProvider.oidc.clientSecretSecretKey"] = "client-secret" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - assert.NoError(t, err) - - keycloakSetupJob, err := getJob(chartYaml, "airbyte-keycloak-setup") - assert.NoError(t, err) - assert.NotNil(t, keycloakSetupJob) - keycloakStatefulSet, err := getStatefulSet(chartYaml, "airbyte-keycloak") - assert.NoError(t, err) - assert.NotNil(t, keycloakStatefulSet) + chartYaml := renderChart(t, helmOpts) + keycloakSetupJob := getJob(chartYaml, "airbyte-keycloak-setup") + keycloakStatefulSet := getStatefulSet(chartYaml, "airbyte-keycloak") setupInitContainers := 
keycloakSetupJob.Spec.Template.Spec.InitContainers keycloakInitContainers := keycloakStatefulSet.Spec.Template.Spec.InitContainers @@ -288,7 +272,7 @@ func TestKeycloakInitContainerOverride(t *testing.T) { }) t.Run("override init container image ", func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithValues() + helmOpts := BaseHelmOptionsForEnterpriseWithValues() helmOpts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" helmOpts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" helmOpts.SetValues["global.auth.identityProvider.secretName"] = "sso-secrets" @@ -299,16 +283,10 @@ func TestKeycloakInitContainerOverride(t *testing.T) { helmOpts.SetValues["global.auth.identityProvider.oidc.clientSecretSecretKey"] = "client-secret" helmOpts.SetValues["keycloak-setup.initContainers.keycloakReadinessCheck.image"] = "airbyte/custom-curl-image" helmOpts.SetValues["keycloak.initContainers.initDb.image"] = "airbyte/custom-postgres-image" - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - assert.NoError(t, err) - - keycloakSetupJob, err := getJob(chartYaml, "airbyte-keycloak-setup") - assert.NotNil(t, keycloakSetupJob) - assert.NoError(t, err) - keycloakStatefulSet, err := getStatefulSet(chartYaml, "airbyte-keycloak") - assert.NotNil(t, keycloakStatefulSet) - assert.NoError(t, err) + chartYaml := renderChart(t, helmOpts) + keycloakSetupJob := getJob(chartYaml, "airbyte-keycloak-setup") + keycloakStatefulSet := getStatefulSet(chartYaml, "airbyte-keycloak") setupInitContainers := keycloakSetupJob.Spec.Template.Spec.InitContainers keycloakInitContainers := keycloakStatefulSet.Spec.Template.Spec.InitContainers diff --git a/charts/helm-tests/tests/helm_opts.go b/charts/helm-tests/tests/helm_opts.go new file mode 100644 index 00000000000..93a0a90460b --- /dev/null +++ b/charts/helm-tests/tests/helm_opts.go @@ -0,0 +1,52 @@ +package tests + +import ( + "github.com/gruntwork-io/terratest/modules/helm" + 
"github.com/gruntwork-io/terratest/modules/logger" +) + +func BaseHelmOptions() *helm.Options { + return &helm.Options{ + Logger: logger.Discard, + SetValues: make(map[string]string), + SetJsonValues: make(map[string]string), + ExtraArgs: make(map[string][]string), + // BuildDependencies: true, + } +} + +func BaseHelmOptionsForEnterprise() *helm.Options { + opts := BaseHelmOptions() + opts.SetValues["global.edition"] = "enterprise" + + return opts +} + +func BaseHelmOptionsForEnterpriseWithValues() *helm.Options { + opts := BaseHelmOptions() + opts.SetValues["global.edition"] = "enterprise" + opts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" + opts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" + + return opts +} + +func BaseHelmOptionsForEnterpriseWithAirbyteYml() *helm.Options { + opts := BaseHelmOptions() + opts.SetValues["global.edition"] = "enterprise" + opts.SetFiles = map[string]string{ + "global.airbyteYml": "fixtures/airbyte.yaml", + } + + return opts +} + +func BaseHelmOptionsForStorageType(t string) *helm.Options { + opts := BaseHelmOptions() + opts.SetValues = map[string]string{ + "global.storage.type": t, + "workload-launcher.enabled": "true", + } + + return opts +} diff --git a/charts/helm-tests/tests/init.go b/charts/helm-tests/tests/init.go deleted file mode 100644 index b7de852ef9c..00000000000 --- a/charts/helm-tests/tests/init.go +++ /dev/null @@ -1,20 +0,0 @@ -package test - -import ( - "os" -) - -const chartRepo = "https://airbytehq.github.io/helm-charts" - -var ( - chartPath string - k8sVersion string -) - -func init() { - if chartPath = os.Getenv("HELM_CHART_PATH"); chartPath == "" { - os.Stderr.WriteString("HELM_CHART_PATH environment variable must be set") - } - - k8sVersion = os.Getenv("K8S_VERSION") -} diff --git a/charts/helm-tests/tests/init_test.go b/charts/helm-tests/tests/init_test.go new file mode 100644 index 00000000000..5802776c89e --- /dev/null +++ b/charts/helm-tests/tests/init_test.go @@ -0,0 
+1,7 @@ +package tests + +import ( + helmtests "github.com/airbytehq/airbyte-platform-internal/oss/charts/helm-tests" +) + +var chartPath string = helmtests.DetermineChartPath() diff --git a/charts/helm-tests/tests/k8s_utils.go b/charts/helm-tests/tests/k8s_utils.go deleted file mode 100644 index 02e76053b65..00000000000 --- a/charts/helm-tests/tests/k8s_utils.go +++ /dev/null @@ -1,477 +0,0 @@ -package test - -import ( - "encoding/json" - "errors" - "fmt" - "io" - "strings" - "testing" - - "github.com/gruntwork-io/terratest/modules/helm" - "github.com/gruntwork-io/terratest/modules/logger" - "github.com/stretchr/testify/assert" - yaml "gopkg.in/yaml.v3" - appsv1 "k8s.io/api/apps/v1" - batchv1 "k8s.io/api/batch/v1" - corev1 "k8s.io/api/core/v1" - rbac "k8s.io/api/rbac/v1" -) - -func baseHelmOptions() *helm.Options { - return &helm.Options{ - Logger: logger.Discard, - SetValues: make(map[string]string), - SetJsonValues: make(map[string]string), - ExtraArgs: make(map[string][]string), - } -} - -func baseHelmOptionsForEnterprise() *helm.Options { - opts := baseHelmOptions() - opts.SetValues["global.edition"] = "enterprise" - - return opts -} - -func baseHelmOptionsForEnterpriseWithValues() *helm.Options { - opts := baseHelmOptions() - opts.SetValues["global.edition"] = "enterprise" - opts.SetValues["global.auth.instanceAdmin.firstName"] = "Octavia" - opts.SetValues["global.auth.instanceAdmin.lastName"] = "Squidington" - - return opts -} - -func baseHelmOptionsForEnterpriseWithAirbyteYml() *helm.Options { - opts := baseHelmOptions() - opts.SetValues["global.edition"] = "enterprise" - opts.SetFiles = map[string]string{ - "global.airbyteYml": "fixtures/airbyte.yaml", - } - - return opts -} - -func baseHelmOptionsForStorageType(t string) *helm.Options { - opts := baseHelmOptions() - opts.SetValues = map[string]string{ - "global.storage.type": t, - "workload-launcher.enabled": "true", - } - - return opts -} - -func getK8sResourceByKindAndName(renderedYaml, kind, name 
string) (map[string]any, error) { - decoder := yaml.NewDecoder(strings.NewReader(renderedYaml)) - - var err error - for { - var resource map[string]any - err = decoder.Decode(&resource) - if errors.Is(err, io.EOF) { - break - } - - if resource["kind"] == nil { - continue - } - k := resource["kind"].(string) - n := resource["metadata"].(map[string]any)["name"].(string) - - if k == kind && n == name { - return resource, nil - } - } - - return nil, fmt.Errorf("could not find resource of Kind: %s Name: %s", kind, name) -} - -func getConfigMap(renderedYaml, name string) (*corev1.ConfigMap, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "ConfigMap", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var cm corev1.ConfigMap - err = json.Unmarshal(b, &cm) - if err != nil { - return nil, err - } - - return &cm, nil -} - -func getSecret(renderedYaml, name string) (*corev1.Secret, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "Secret", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var secret corev1.Secret - err = json.Unmarshal(b, &secret) - if err != nil { - return nil, err - } - - return &secret, nil -} - -func getDeployment(renderedYaml, name string) (*appsv1.Deployment, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "Deployment", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var dep appsv1.Deployment - err = json.Unmarshal(b, &dep) - if err != nil { - return nil, err - } - - return &dep, nil -} - -func getStatefulSet(renderedYaml, name string) (*appsv1.StatefulSet, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "StatefulSet", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var ss appsv1.StatefulSet - err = json.Unmarshal(b, 
&ss) - if err != nil { - return nil, err - } - - return &ss, nil -} - -func getPod(renderedYaml, name string) (*corev1.Pod, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "Pod", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var p corev1.Pod - err = json.Unmarshal(b, &p) - if err != nil { - return nil, err - } - - return &p, nil -} - -func getJob(renderedYaml, name string) (*batchv1.Job, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "Job", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var job batchv1.Job - err = json.Unmarshal(b, &job) - if err != nil { - return nil, err - } - - return &job, nil -} - -func getService(renderedYaml, name string) (*corev1.Service, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "Service", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var svc corev1.Service - err = json.Unmarshal(b, &svc) - if err != nil { - return nil, err - } - - return &svc, nil -} - -func getServiceAccount(renderedYaml, name string) (*corev1.ServiceAccount, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "ServiceAccount", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var sa corev1.ServiceAccount - err = json.Unmarshal(b, &sa) - if err != nil { - return nil, err - } - - return &sa, nil -} - -func getRole(renderedYaml, name string) (*rbac.Role, error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "Role", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var role rbac.Role - err = json.Unmarshal(b, &role) - if err != nil { - return nil, err - } - - return &role, nil -} - -func getRoleBinding(renderedYaml, name string) (*rbac.RoleBinding, 
error) { - m, err := getK8sResourceByKindAndName(renderedYaml, "RoleBinding", name) - if err != nil { - return nil, err - } - - b, err := json.Marshal(m) - if err != nil { - return nil, err - } - - var binding rbac.RoleBinding - err = json.Unmarshal(b, &binding) - if err != nil { - return nil, err - } - - return &binding, nil -} - -func envVarMap(vars []corev1.EnvVar) map[string]corev1.EnvVar { - m := make(map[string]corev1.EnvVar) - for _, k := range vars { - m[k.Name] = k - } - return m -} - -type expectedEnvVar interface { - RefName(name string) expectedEnvVar - RefKey(key string) expectedEnvVar -} - -type expectedVarFromConfigMap struct { - // value to expect for `valueFrom.configMapKeyRef.name` - refName string - // value to expect for `valueFrom.configMapKeyRef.key` - refKey string -} - -func (e expectedVarFromConfigMap) RefName(n string) expectedEnvVar { - e.refName = n - return e -} - -func (e expectedVarFromConfigMap) RefKey(k string) expectedEnvVar { - e.refKey = k - return e -} - -func expectedConfigMapVar() *expectedVarFromConfigMap { - return &expectedVarFromConfigMap{} -} - -type expectedVarFromSecret struct { - // value to expect for `valueFrom.secretKeyRef.name` - refName string - // value to expect for `valueFrom.secretKeyRef.key` - refKey string -} - -func (e expectedVarFromSecret) RefName(n string) expectedEnvVar { - e.refName = n - return e -} - -func (e expectedVarFromSecret) RefKey(k string) expectedEnvVar { - e.refKey = k - return e -} - -func expectedSecretVar() *expectedVarFromSecret { - return &expectedVarFromSecret{} -} - -func verifyEnvVar(t *testing.T, expected expectedEnvVar, actual corev1.EnvVar) { - switch expected := expected.(type) { - case expectedVarFromConfigMap: - assert.NotNil(t, actual.ValueFrom.ConfigMapKeyRef) - assert.Equal(t, expected.refName, actual.ValueFrom.ConfigMapKeyRef.Name) - assert.Equal(t, expected.refKey, actual.ValueFrom.ConfigMapKeyRef.Key) - case expectedVarFromSecret: - assert.NotNil(t, 
actual.ValueFrom.SecretKeyRef) - assert.Equal(t, expected.refName, actual.ValueFrom.SecretKeyRef.Name) - assert.Equal(t, expected.refKey, actual.ValueFrom.SecretKeyRef.Key) - } -} - -type expectedVolumeMount interface { - Volume(string) expectedVolumeMount - MountPath(string) expectedVolumeMount - SubPath(string) expectedVolumeMount - RefName(string) expectedVolumeMount -} - -type expectedVolumeMountFromConfigMap struct { - volume string - mountPath string - subPath string - refName string -} - -func expectedConfigMapVolumeMount() *expectedVolumeMountFromConfigMap { - return &expectedVolumeMountFromConfigMap{} -} - -func (v expectedVolumeMountFromConfigMap) Volume(name string) expectedVolumeMount { - v.volume = name - return v -} - -func (v expectedVolumeMountFromConfigMap) MountPath(path string) expectedVolumeMount { - v.mountPath = path - return v -} - -func (v expectedVolumeMountFromConfigMap) SubPath(subpath string) expectedVolumeMount { - v.subPath = subpath - return v -} - -func (v expectedVolumeMountFromConfigMap) RefName(ref string) expectedVolumeMount { - v.refName = ref - return v -} - -type expectedVolumeMountFromSecret struct { - volume string - mountPath string - subPath string - refName string -} - -func expectedSecretVolumeMount() *expectedVolumeMountFromSecret { - return &expectedVolumeMountFromSecret{} -} - -func (v expectedVolumeMountFromSecret) Volume(name string) expectedVolumeMount { - v.volume = name - return v -} - -func (v expectedVolumeMountFromSecret) MountPath(path string) expectedVolumeMount { - v.mountPath = path - return v -} - -func (v expectedVolumeMountFromSecret) SubPath(subpath string) expectedVolumeMount { - v.subPath = subpath - return v -} - -func (v expectedVolumeMountFromSecret) RefName(ref string) expectedVolumeMount { - v.refName = ref - return v -} - -func verifyVolumeMountForPod(t *testing.T, expected expectedVolumeMount, pod corev1.PodSpec) { - var vol *corev1.Volume - var volMnt *corev1.VolumeMount - - switch expected 
:= expected.(type) { - case expectedVolumeMountFromConfigMap: - for _, v := range pod.Volumes { - if v.Name == expected.volume { - vol = &v - break - } - } - assert.NotNil(t, vol) - - for _, vm := range pod.Containers[0].VolumeMounts { - if vm.Name == expected.volume { - volMnt = &vm - break - } - } - assert.NotNil(t, volMnt) - - assert.NotNil(t, vol.ConfigMap) - assert.Equal(t, expected.refName, vol.ConfigMap.Name, "volume configMap.name does not match") - assert.Equal(t, expected.mountPath, vol, "volumeMount mountPath does not match") - assert.Equal(t, expected.subPath, volMnt.SubPath, "volumeMount subPath does not match") - case expectedVolumeMountFromSecret: - for _, v := range pod.Volumes { - if v.Name == expected.volume { - vol = &v - break - } - } - assert.NotNil(t, vol) - - for _, vm := range pod.Containers[0].VolumeMounts { - if vm.Name == expected.volume { - volMnt = &vm - break - } - } - assert.NotNil(t, volMnt) - - assert.Equal(t, "t", "t") - assert.NotNil(t, vol.Secret) - assert.Equal(t, expected.refName, vol.Secret.SecretName, "volume secret.secretName does not match") - assert.Equal(t, expected.mountPath, volMnt.MountPath, "volumeMount mountPath does not match") - assert.Equal(t, expected.subPath, volMnt.SubPath, "volumeMount subPath does not match") - } -} diff --git a/charts/helm-tests/tests/k8s_utils_test.go b/charts/helm-tests/tests/k8s_utils_test.go new file mode 100644 index 00000000000..75976e35586 --- /dev/null +++ b/charts/helm-tests/tests/k8s_utils_test.go @@ -0,0 +1,306 @@ +package tests + +import ( + "strings" + "testing" + + "github.com/gruntwork-io/terratest/modules/helm" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + appsv1 "k8s.io/api/apps/v1" + batchv1 "k8s.io/api/batch/v1" + corev1 "k8s.io/api/core/v1" + rbac "k8s.io/api/rbac/v1" + "k8s.io/apimachinery/pkg/runtime" + "k8s.io/client-go/kubernetes/scheme" +) + +func renderChart(t *testing.T, opts *helm.Options) string { + out, err := 
helm.RenderTemplateE(t, opts, chartPath, "airbyte", nil) + require.NoError(t, err, "failure rendering template") + return out +} + +func decodeK8sResources(renderedYaml string) []runtime.Object { + out := []runtime.Object{} + chunks := strings.Split(renderedYaml, "---") + for _, chunk := range chunks { + if len(chunk) == 0 { + continue + } + obj, _, err := scheme.Codecs.UniversalDeserializer().Decode([]byte(chunk), nil, nil) + if err != nil { + continue + } + out = append(out, obj) + } + return out +} + +func getK8sObjName(obj runtime.Object) string { + i, ok := obj.(interface{ GetName() string }) + if !ok { + return "" + } + return i.GetName() +} + +func getK8sResourceByKindAndName(renderedYaml, kind, name string) runtime.Object { + objs := decodeK8sResources(renderedYaml) + + for _, obj := range objs { + if obj.GetObjectKind().GroupVersionKind().Kind == kind && getK8sObjName(obj) == name { + return obj + } + } + + return nil +} + +func getPodSpec(obj runtime.Object) *corev1.PodSpec { + switch z := obj.(type) { + case *corev1.Pod: + return &z.Spec + case *batchv1.Job: + return &z.Spec.Template.Spec + case *appsv1.Deployment: + return &z.Spec.Template.Spec + case *appsv1.StatefulSet: + return &z.Spec.Template.Spec + default: + return nil + } +} + +func assertNoResource(t *testing.T, renderedYaml, kind, name string) { + m := getK8sResourceByKindAndName(renderedYaml, kind, name) + assert.Nil(t, m) +} + +func getConfigMap(renderedYaml, name string) *corev1.ConfigMap { + return getK8sResourceByKindAndName(renderedYaml, "ConfigMap", name).(*corev1.ConfigMap) +} + +func getSecret(renderedYaml, name string) *corev1.Secret { + return getK8sResourceByKindAndName(renderedYaml, "Secret", name).(*corev1.Secret) +} + +func getDeployment(renderedYaml, name string) *appsv1.Deployment { + return getK8sResourceByKindAndName(renderedYaml, "Deployment", name).(*appsv1.Deployment) +} + +func getStatefulSet(renderedYaml, name string) *appsv1.StatefulSet { + return 
getK8sResourceByKindAndName(renderedYaml, "StatefulSet", name).(*appsv1.StatefulSet) +} + +func getPod(renderedYaml, name string) *corev1.Pod { + return getK8sResourceByKindAndName(renderedYaml, "Pod", name).(*corev1.Pod) +} + +func getJob(renderedYaml, name string) *batchv1.Job { + return getK8sResourceByKindAndName(renderedYaml, "Job", name).(*batchv1.Job) +} + +func getService(renderedYaml, name string) *corev1.Service { + return getK8sResourceByKindAndName(renderedYaml, "Service", name).(*corev1.Service) +} + +func getServiceAccount(renderedYaml, name string) *corev1.ServiceAccount { + return getK8sResourceByKindAndName(renderedYaml, "ServiceAccount", name).(*corev1.ServiceAccount) +} + +func getRole(renderedYaml, name string) *rbac.Role { + return getK8sResourceByKindAndName(renderedYaml, "Role", name).(*rbac.Role) +} + +func getRoleBinding(renderedYaml, name string) *rbac.RoleBinding { + return getK8sResourceByKindAndName(renderedYaml, "RoleBinding", name).(*rbac.RoleBinding) +} + +func envVarMap(vars []corev1.EnvVar) map[string]corev1.EnvVar { + m := make(map[string]corev1.EnvVar) + for _, k := range vars { + m[k.Name] = k + } + return m +} + +type expectedEnvVar interface { + RefName(name string) expectedEnvVar + RefKey(key string) expectedEnvVar +} + +type expectedVarFromConfigMap struct { + // value to expect for `valueFrom.configMapKeyRef.name` + refName string + // value to expect for `valueFrom.configMapKeyRef.key` + refKey string +} + +func (e expectedVarFromConfigMap) RefName(n string) expectedEnvVar { + e.refName = n + return e +} + +func (e expectedVarFromConfigMap) RefKey(k string) expectedEnvVar { + e.refKey = k + return e +} + +func expectedConfigMapVar() *expectedVarFromConfigMap { + return &expectedVarFromConfigMap{} +} + +type expectedVarFromSecret struct { + // value to expect for `valueFrom.secretKeyRef.name` + refName string + // value to expect for `valueFrom.secretKeyRef.key` + refKey string +} + +func (e expectedVarFromSecret) RefName(n 
string) expectedEnvVar { + e.refName = n + return e +} + +func (e expectedVarFromSecret) RefKey(k string) expectedEnvVar { + e.refKey = k + return e +} + +func expectedSecretVar() *expectedVarFromSecret { + return &expectedVarFromSecret{} +} + +func verifyEnvVar(t *testing.T, expected expectedEnvVar, actual corev1.EnvVar) { + switch expected := expected.(type) { + case expectedVarFromConfigMap: + assert.NotNil(t, actual.ValueFrom.ConfigMapKeyRef) + assert.Equal(t, expected.refName, actual.ValueFrom.ConfigMapKeyRef.Name) + assert.Equal(t, expected.refKey, actual.ValueFrom.ConfigMapKeyRef.Key) + case expectedVarFromSecret: + assert.NotNil(t, actual.ValueFrom.SecretKeyRef) + assert.Equal(t, expected.refName, actual.ValueFrom.SecretKeyRef.Name) + assert.Equal(t, expected.refKey, actual.ValueFrom.SecretKeyRef.Key) + } +} + +type expectedVolumeMount interface { + Volume(string) expectedVolumeMount + MountPath(string) expectedVolumeMount + SubPath(string) expectedVolumeMount + RefName(string) expectedVolumeMount +} + +type expectedVolumeMountFromConfigMap struct { + volume string + mountPath string + subPath string + refName string +} + +func expectedConfigMapVolumeMount() *expectedVolumeMountFromConfigMap { + return &expectedVolumeMountFromConfigMap{} +} + +func (v expectedVolumeMountFromConfigMap) Volume(name string) expectedVolumeMount { + v.volume = name + return v +} + +func (v expectedVolumeMountFromConfigMap) MountPath(path string) expectedVolumeMount { + v.mountPath = path + return v +} + +func (v expectedVolumeMountFromConfigMap) SubPath(subpath string) expectedVolumeMount { + v.subPath = subpath + return v +} + +func (v expectedVolumeMountFromConfigMap) RefName(ref string) expectedVolumeMount { + v.refName = ref + return v +} + +type expectedVolumeMountFromSecret struct { + volume string + mountPath string + subPath string + refName string +} + +func expectedSecretVolumeMount() *expectedVolumeMountFromSecret { + return &expectedVolumeMountFromSecret{} +} + +func 
(v expectedVolumeMountFromSecret) Volume(name string) expectedVolumeMount { + v.volume = name + return v +} + +func (v expectedVolumeMountFromSecret) MountPath(path string) expectedVolumeMount { + v.mountPath = path + return v +} + +func (v expectedVolumeMountFromSecret) SubPath(subpath string) expectedVolumeMount { + v.subPath = subpath + return v +} + +func (v expectedVolumeMountFromSecret) RefName(ref string) expectedVolumeMount { + v.refName = ref + return v +} + +func verifyVolumeMountForPod(t *testing.T, expected expectedVolumeMount, pod corev1.PodSpec) { + var vol *corev1.Volume + var volMnt *corev1.VolumeMount + + switch expected := expected.(type) { + case expectedVolumeMountFromConfigMap: + for _, v := range pod.Volumes { + if v.Name == expected.volume { + vol = &v + break + } + } + assert.NotNil(t, vol) + + for _, vm := range pod.Containers[0].VolumeMounts { + if vm.Name == expected.volume { + volMnt = &vm + break + } + } + assert.NotNil(t, volMnt) + + assert.NotNil(t, vol.ConfigMap) + assert.Equal(t, expected.refName, vol.ConfigMap.Name, "volume configMap.name does not match") + assert.Equal(t, expected.mountPath, vol, "volumeMount mountPath does not match") + assert.Equal(t, expected.subPath, volMnt.SubPath, "volumeMount subPath does not match") + case expectedVolumeMountFromSecret: + for _, v := range pod.Volumes { + if v.Name == expected.volume { + vol = &v + break + } + } + assert.NotNil(t, vol) + + for _, vm := range pod.Containers[0].VolumeMounts { + if vm.Name == expected.volume { + volMnt = &vm + break + } + } + assert.NotNil(t, volMnt) + + assert.Equal(t, "t", "t") + assert.NotNil(t, vol.Secret) + assert.Equal(t, expected.refName, vol.Secret.SecretName, "volume secret.secretName does not match") + assert.Equal(t, expected.mountPath, volMnt.MountPath, "volumeMount mountPath does not match") + assert.Equal(t, expected.subPath, volMnt.SubPath, "volumeMount subPath does not match") + } +} diff --git a/charts/helm-tests/tests/storage_config_test.go 
b/charts/helm-tests/tests/storage_config_test.go index fd13bbb1504..01e301f1690 100644 --- a/charts/helm-tests/tests/storage_config_test.go +++ b/charts/helm-tests/tests/storage_config_test.go @@ -1,6 +1,4 @@ -//go:build template || storage_config - -package test +package tests import ( "fmt" @@ -9,125 +7,79 @@ import ( "github.com/gruntwork-io/terratest/modules/helm" "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" ) const ( defaultStorageSecretName = "airbyte-airbyte-secrets" ) -func TestBasicStorageConfiguration(t *testing.T) { - helmOpts := baseHelmOptions() - - t.Run("configmap and secret keys", func(t *testing.T) { - type ExpectedSecret struct { - Template string - Name string - ExpectedSecretValues map[string][]byte - } +func TestDefaultStorage(t *testing.T) { + opts := BaseHelmOptions() + chartYaml := renderChart(t, opts) + cm := getConfigMap(chartYaml, "airbyte-airbyte-env") + + expect := map[string]string{ + "LOG4J_CONFIGURATION_FILE": "log4j2-minio.xml", + "MINIO_ENDPOINT": "http://airbyte-minio-svc:9000", + "S3_PATH_STYLE_ACCESS": "true", + "STORAGE_BUCKET_ACTIVITY_PAYLOAD": "airbyte-storage", + "STORAGE_BUCKET_LOG": "airbyte-storage", + "STORAGE_BUCKET_STATE": "airbyte-storage", + "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-storage", + "STORAGE_TYPE": "minio", + } - cases := []struct { - Type string - SetValues map[string]string - ExpectedConfigMapValues map[string]string - ExpectedSecret *ExpectedSecret - }{ + for k, v := range expect { + assert.Equal(t, v, cm.Data[k], "for key "+k) + } +} - { - Type: "minio", - SetValues: map[string]string{}, - ExpectedConfigMapValues: map[string]string{ - "LOG4J_CONFIGURATION_FILE": "log4j2-minio.xml", - "MINIO_ENDPOINT": "http://airbyte-minio-svc:9000", - "S3_PATH_STYLE_ACCESS": "true", - "STORAGE_BUCKET_ACTIVITY_PAYLOAD": "airbyte-storage", - "STORAGE_BUCKET_LOG": "airbyte-storage", - "STORAGE_BUCKET_STATE": "airbyte-storage", - "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-storage", - 
"STORAGE_TYPE": "minio", - }, - ExpectedSecret: nil, - }, - { - Type: "gcs", - SetValues: map[string]string{ - // Base64 encoded `{"fake": "fake"}` - "global.storage.gcs.credentialsJson": "eyJmYWtlIjogImZha2UifQ==", - }, - ExpectedConfigMapValues: map[string]string{ - "GOOGLE_APPLICATION_CREDENTIALS": "/secrets/gcs-log-creds/gcp.json", - "LOG4J_CONFIGURATION_FILE": "log4j2-gcs.xml", - "S3_PATH_STYLE_ACCESS": "", - "STORAGE_BUCKET_ACTIVITY_PAYLOAD": "airbyte-storage", - "STORAGE_BUCKET_LOG": "airbyte-storage", - "STORAGE_BUCKET_STATE": "airbyte-storage", - "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-storage", - "STORAGE_TYPE": "gcs", - }, - ExpectedSecret: &ExpectedSecret{ - Template: "templates/gcs-log-creds-secret.yaml", - Name: "airbyte-gcs-log-creds", - ExpectedSecretValues: map[string][]byte{ - "gcp.json": []byte(`{"fake": "fake"}`), - }, - }, - }, - { - Type: "s3", - SetValues: map[string]string{ - "global.storage.s3.authenticationType": "credentials", - }, - ExpectedConfigMapValues: map[string]string{ - "AWS_DEFAULT_REGION": "", - "LOG4J_CONFIGURATION_FILE": "log4j2-s3.xml", - "S3_PATH_STYLE_ACCESS": "", - "STORAGE_BUCKET_ACTIVITY_PAYLOAD": "airbyte-storage", - "STORAGE_BUCKET_LOG": "airbyte-storage", - "STORAGE_BUCKET_STATE": "airbyte-storage", - "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-storage", - "STORAGE_TYPE": "s3", - }, - }, - } +func TestGcsStorage(t *testing.T) { + opts := BaseHelmOptions() + // Base64 encoded `{"fake": "fake"}` + opts.SetValues["global.storage.type"] = "gcs" + opts.SetValues["global.storage.gcs.credentialsJson"] = "eyJmYWtlIjogImZha2UifQ==" + chartYaml := renderChart(t, opts) + + cm := getConfigMap(chartYaml, "airbyte-airbyte-env") + expect := map[string]string{ + "GOOGLE_APPLICATION_CREDENTIALS": "/secrets/gcs-log-creds/gcp.json", + "LOG4J_CONFIGURATION_FILE": "log4j2-gcs.xml", + "S3_PATH_STYLE_ACCESS": "", + "STORAGE_BUCKET_ACTIVITY_PAYLOAD": "airbyte-storage", + "STORAGE_BUCKET_LOG": "airbyte-storage", + "STORAGE_BUCKET_STATE": 
"airbyte-storage", + "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-storage", + "STORAGE_TYPE": "gcs", + } + for k, v := range expect { + assert.Equal(t, v, cm.Data[k], "for key "+k) + } - for _, c := range cases { - t.Run(fmt.Sprintf("storage type %s", c.Type), func(t *testing.T) { - helmOpts.SetValues["global.storage.type"] = c.Type - for k, v := range c.SetValues { - helmOpts.SetValues[k] = v - } + secret := getSecret(chartYaml, "airbyte-gcs-log-creds") + assert.Equal(t, []byte(`{"fake": "fake"}`), secret.Data["gcp.json"]) +} - t.Run("verify config map values", func(t *testing.T) { - configMapYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{"templates/env-configmap.yaml"}) - require.NoError(t, err, "failure rendering template") - - configMap, err := getConfigMap(configMapYaml, "airbyte-airbyte-env") - assert.NotNil(t, configMap) - require.NoError(t, err) - - // verify expected keys in the config map - for k, v := range c.ExpectedConfigMapValues { - assert.Equal(t, v, configMap.Data[k]) - } - }) - - t.Run("verify secret values", func(t *testing.T) { - if c.ExpectedSecret != nil { - secretYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", []string{c.ExpectedSecret.Template}) - require.NoError(t, err, "failure rendering template") - - secret, err := getSecret(secretYaml, c.ExpectedSecret.Name) - assert.NotNil(t, secret) - require.NoError(t, err) - - for k, v := range c.ExpectedSecret.ExpectedSecretValues { - assert.Equal(t, v, secret.Data[k]) - } - } - }) - }) - } - }) +func TestS3Storage(t *testing.T) { + opts := BaseHelmOptions() + opts.SetValues["global.storage.type"] = "s3" + opts.SetValues["global.storage.s3.authenticationType"] = "credentials" + chartYaml := renderChart(t, opts) + + cm := getConfigMap(chartYaml, "airbyte-airbyte-env") + expect := map[string]string{ + "AWS_DEFAULT_REGION": "", + "LOG4J_CONFIGURATION_FILE": "log4j2-s3.xml", + "S3_PATH_STYLE_ACCESS": "", + "STORAGE_BUCKET_ACTIVITY_PAYLOAD": 
"airbyte-storage", + "STORAGE_BUCKET_LOG": "airbyte-storage", + "STORAGE_BUCKET_STATE": "airbyte-storage", + "STORAGE_BUCKET_WORKLOAD_OUTPUT": "airbyte-storage", + "STORAGE_TYPE": "s3", + } + for k, v := range expect { + assert.Equal(t, v, cm.Data[k], "for key "+k) + } } func verifyCredentialsForDeployments(t *testing.T, helmOpts *helm.Options, expectedEnvVars map[string]expectedEnvVar, deployments []string) { @@ -148,9 +100,7 @@ func verifyCredentialsForDeployments(t *testing.T, helmOpts *helm.Options, expec storageType := helmOpts.SetValues["global.storage.type"] for _, c := range cases { t.Run(fmt.Sprintf("deployment %s contains expected %s credentials env vars", c.DeploymentName, strings.ToUpper(storageType)), func(t *testing.T) { - dep, err := getDeployment(chartYaml, c.DeploymentName) - assert.NotNil(t, dep) - assert.NoError(t, err) + dep := getDeployment(chartYaml, c.DeploymentName) actualVars := envVarMap(dep.Spec.Template.Spec.Containers[0].Env) for k, expected := range c.ExpectedEnvVars { @@ -180,9 +130,7 @@ func verifyVolumeMountsForDeployments(t *testing.T, helmOpts *helm.Options, expe storageType := helmOpts.SetValues["global.storage.type"] for _, c := range cases { t.Run(fmt.Sprintf("deployment %s contains expected %s volume mounts", c.DeploymentName, strings.ToUpper(storageType)), func(t *testing.T) { - dep, err := getDeployment(chartYaml, c.DeploymentName) - assert.NotNil(t, dep) - assert.NoError(t, err) + dep := getDeployment(chartYaml, c.DeploymentName) for _, expected := range c.ExpectedVolumeMounts { verifyVolumeMountForPod(t, expected, dep.Spec.Template.Spec) @@ -194,7 +142,7 @@ func verifyVolumeMountsForDeployments(t *testing.T, helmOpts *helm.Options, expe func TestS3StorageConfigurationSecrets(t *testing.T) { t.Run("authentication type: credentials", func(t *testing.T) { t.Run("default storageSecretName", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("s3") + helmOpts := BaseHelmOptionsForStorageType("s3") 
helmOpts.SetValues["global.storage.s3.authenticationType"] = "credentials" expectedEnvVarKeys := map[string]expectedEnvVar{ @@ -205,7 +153,7 @@ func TestS3StorageConfigurationSecrets(t *testing.T) { }) t.Run("user-defined storageSecretName", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("s3") + helmOpts := BaseHelmOptionsForStorageType("s3") helmOpts.SetValues["global.storage.s3.authenticationType"] = "credentials" helmOpts.SetValues["global.storage.secretName"] = "user-defined-secret" @@ -218,7 +166,7 @@ func TestS3StorageConfigurationSecrets(t *testing.T) { }) t.Run("authentication type: instanceProfile", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("s3") + helmOpts := BaseHelmOptionsForStorageType("s3") helmOpts.SetValues["global.storage.s3.authenticationType"] = "instanceProfile" // The AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must not be set @@ -250,9 +198,7 @@ func TestS3StorageConfigurationSecrets(t *testing.T) { for _, c := range cases { t.Run(fmt.Sprintf("deployment %s must not contain AWS credential env vars", c.Name), func(t *testing.T) { - dep, err := getDeployment(chartYaml, c.Name) - assert.NotNil(t, dep) - assert.NoError(t, err) + dep := getDeployment(chartYaml, c.Name) actualVars := envVarMap(dep.Spec.Template.Spec.Containers[0].Env) for k := range c.UnexpectedEnvVars { @@ -266,24 +212,22 @@ func TestS3StorageConfigurationSecrets(t *testing.T) { func TestGCSStorageConfigurationSecrets(t *testing.T) { t.Run("should return an error if global.storage.gcs is not set", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("gcs") + helmOpts := BaseHelmOptionsForStorageType("gcs") _, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) assert.ErrorContains(t, err, "You must set 'global.storage.gcs'") }) t.Run("should return an error if global.storage.gcs.credentialsJson is not set and default secret is used", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("gcs") + 
helmOpts := BaseHelmOptionsForStorageType("gcs") helmOpts.SetValues["global.storage.gcs.someKey"] = "dummy-value" _, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) assert.ErrorContains(t, err, "You must set 'global.storage.gcs.credentialsJson'") }) t.Run("should not create gcs-log-creds secret if `storageSecretName` is set", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("gcs") + helmOpts := BaseHelmOptionsForStorageType("gcs") helmOpts.SetValues["global.storage.gcs.projectId"] = "project-id" helmOpts.SetValues["global.storage.storageSecretName"] = "airbyte-config-secrets" - _, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - assert.NoError(t, err) expectedVolumeMounts := []expectedVolumeMount{ expectedSecretVolumeMount(). @@ -296,7 +240,7 @@ func TestGCSStorageConfigurationSecrets(t *testing.T) { t.Run("should mount credentials from a secret", func(t *testing.T) { t.Run("using the default secret", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("gcs") + helmOpts := BaseHelmOptionsForStorageType("gcs") helmOpts.SetValues["global.storage.gcs.credentialsJson"] = "dummy-value" expectedVolumeMounts := []expectedVolumeMount{ expectedSecretVolumeMount(). 
@@ -308,7 +252,7 @@ func TestGCSStorageConfigurationSecrets(t *testing.T) { }) t.Run("using a user-defined secret", func(t *testing.T) { - helmOpts := baseHelmOptionsForStorageType("gcs") + helmOpts := BaseHelmOptionsForStorageType("gcs") helmOpts.SetValues["global.storage.gcs.credentialsJson"] = "dummy-value" helmOpts.SetValues["global.storage.storageSecretName"] = "customer-secret" expectedVolumeMounts := []expectedVolumeMount{ diff --git a/charts/helm-tests/tests/topology_test.go b/charts/helm-tests/tests/topology_test.go index f3e38a90b17..e07c6293d77 100644 --- a/charts/helm-tests/tests/topology_test.go +++ b/charts/helm-tests/tests/topology_test.go @@ -1,300 +1,114 @@ -package test +package tests import ( "encoding/json" - "fmt" "testing" - "github.com/gruntwork-io/terratest/modules/helm" "github.com/stretchr/testify/assert" corev1 "k8s.io/api/core/v1" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" ) func TestBasicTopologyConfiguration(t *testing.T) { - cases := []struct { - kind string - name string - valuesPath string - }{ - { - kind: "Deployment", - name: "airbyte-server", - valuesPath: "server", - }, - { - kind: "Deployment", - name: "airbyte-webapp", - valuesPath: "webapp", - }, - { - kind: "Deployment", - name: "airbyte-connector-builder-server", - valuesPath: "connector-builder-server", - }, - { - kind: "Deployment", - name: "airbyte-worker", - valuesPath: "worker", - }, - //{ - // kind: "Deployment", - // name: "airbyte-workload-api-server", - // valuesPath: "workload-api-server", - //}, -// { -// kind: "Deployment", -// name: "airbyte-workload-launcher", -// valuesPath: "workload-launcher", -// }, - { - kind: "Deployment", - name: "airbyte-cron", - valuesPath: "cron", - }, - { - kind: "StatefulSet", - name: "airbyte-keycloak", - valuesPath: "keycloak", - }, - { - kind: "Job", - name: "airbyte-keycloak-setup", - valuesPath: "keycloak-setup", - }, - { - kind: "Pod", - name: "airbyte-airbyte-bootloader", - valuesPath: "airbyte-bootloader", - }, - { - 
kind: "Deployment", - name: "airbyte-metrics", - valuesPath: "metrics", - }, + // use enterprise as a base because it enables more things by default. + opts := BaseHelmOptionsForEnterpriseWithValues() + opts.SetValues["metrics.enabled"] = "true" + + expectSelector := map[string]string{ + "machineSize": "xlarge", + "region": "us-west-2", + } + + expectTolerations := []corev1.Toleration{ { - kind: "Deployment", - name: "airbyte-temporal", - valuesPath: "temporal", + Key: "key1", + Operator: "Equal", + Value: "value1", + Effect: "NoSchedule", }, { - kind: "Deployment", - name: "airbyte-pod-sweeper-pod-sweeper", - valuesPath: "pod-sweeper", + Key: "key2", + Operator: "Equal", + Value: "value2", + Effect: "NoSchedule", }, } - t.Run("configure nodeSelector for workloads", func(t *testing.T) { - t.Parallel() - nodeSelector := map[string]string{ - "machineSize": "xlarge", - "region": "us-west-2", - } - - for _, c := range cases { - t.Run(fmt.Sprintf("verify nodeSelectors are set for %s: %s", c.kind, c.name), func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithAirbyteYml() // enables all the things - helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" -// helmOpts.SetValues["workload-launcher.enabled"] = "true" - for k, v := range nodeSelector { - helmOpts.SetValues[c.valuesPath+".nodeSelector."+k] = v - } - - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - assert.NotEmpty(t, chartYaml) - assert.NoError(t, err) - - switch c.kind { - case "Pod": - pod, err := getPod(chartYaml, c.name) - assert.NotNil(t, pod) - assert.NoError(t, err) - - assert.Equal(t, pod.Name, c.name) - assert.Equal(t, pod.Spec.NodeSelector, nodeSelector) - case "Job": - job, err := getJob(chartYaml, c.name) - assert.NotNil(t, job) - assert.NoError(t, err) - - assert.Equal(t, job.Name, c.name) - assert.Equal(t, job.Spec.Template.Spec.NodeSelector, nodeSelector) - case "Deployment": - dep, err := 
getDeployment(chartYaml, c.name) - assert.NotNil(t, dep) - assert.NoError(t, err) - - assert.Equal(t, dep.Name, c.name) - assert.Equal(t, dep.Spec.Template.Spec.NodeSelector, nodeSelector) - case "StatefulSet": - ss, err := getStatefulSet(chartYaml, c.name) - assert.NotNil(t, ss) - assert.NoError(t, err) - - assert.Equal(t, ss.Name, c.name) - assert.Equal(t, ss.Spec.Template.Spec.NodeSelector, nodeSelector) - - default: - t.Fatalf("unsupported resource kind: %s", c.kind) - } - }) - } - }) - - t.Run("configure tolerations for workloads", func(t *testing.T) { - t.Parallel() - tolerations := []corev1.Toleration{ - { - Key: "key1", - Operator: "Equal", - Value: "value1", - Effect: "NoSchedule", - }, - { - Key: "key2", - Operator: "Equal", - Value: "value2", - Effect: "NoSchedule", - }, - } - - for _, c := range cases { - t.Run(fmt.Sprintf("verify tolerations are set for %s: %s", c.kind, c.name), func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithAirbyteYml() // enables all the things - helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" -// helmOpts.SetValues["workload-launcher.enabled"] = "true" - for i, tol := range tolerations { - helmOpts.SetValues[c.valuesPath+fmt.Sprintf(".tolerations[%d]", i)+".Key"] = tol.Key - helmOpts.SetValues[c.valuesPath+fmt.Sprintf(".tolerations[%d]", i)+".Operator"] = string(tol.Operator) - helmOpts.SetValues[c.valuesPath+fmt.Sprintf(".tolerations[%d]", i)+".Value"] = tol.Value - helmOpts.SetValues[c.valuesPath+fmt.Sprintf(".tolerations[%d]", i)+".Effect"] = string(tol.Effect) - } - - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - assert.NotEmpty(t, chartYaml) - assert.NoError(t, err) - - switch c.kind { - case "Pod": - pod, err := getPod(chartYaml, c.name) - assert.NotNil(t, pod) - assert.NoError(t, err) - - assert.Equal(t, pod.Name, c.name) - assert.Equal(t, pod.Spec.Tolerations, tolerations) - case "Job": - job, err := 
getJob(chartYaml, c.name) - assert.NotNil(t, job) - assert.NoError(t, err) - - assert.Equal(t, job.Name, c.name) - assert.Equal(t, job.Spec.Template.Spec.Tolerations, tolerations) - case "Deployment": - dep, err := getDeployment(chartYaml, c.name) - assert.NotNil(t, dep) - assert.NoError(t, err) - - assert.Equal(t, dep.Name, c.name) - assert.Equal(t, dep.Spec.Template.Spec.Tolerations, tolerations) - case "StatefulSet": - ss, err := getStatefulSet(chartYaml, c.name) - assert.NotNil(t, ss) - assert.NoError(t, err) - - assert.Equal(t, ss.Name, c.name) - assert.Equal(t, ss.Spec.Template.Spec.Tolerations, tolerations) - - default: - t.Fatalf("unsupported resource kind: %s", c.kind) - } - }) - } - }) - - t.Run("configure affinity for workloads", func(t *testing.T) { - t.Parallel() - affinity := &corev1.Affinity{ - NodeAffinity: &corev1.NodeAffinity{ - RequiredDuringSchedulingIgnoredDuringExecution: &corev1.NodeSelector{ - NodeSelectorTerms: []corev1.NodeSelectorTerm{ - { - MatchFields: []corev1.NodeSelectorRequirement{ - { - Key: "node-size", - Operator: "Equal", - Values: []string{"xlarge"}, - }, + expectAffinity := &corev1.Affinity{ + NodeAffinity: &corev1.NodeAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: &corev1.NodeSelector{ + NodeSelectorTerms: []corev1.NodeSelectorTerm{ + { + MatchFields: []corev1.NodeSelectorRequirement{ + { + Key: "node-size", + Operator: "Equal", + Values: []string{"xlarge"}, }, }, }, }, }, - PodAffinity: &corev1.PodAffinity{ - RequiredDuringSchedulingIgnoredDuringExecution: []corev1.PodAffinityTerm{ - { - LabelSelector: &metav1.LabelSelector{ - MatchLabels: map[string]string{ - "color": "purple", - }, + }, + PodAffinity: &corev1.PodAffinity{ + RequiredDuringSchedulingIgnoredDuringExecution: []corev1.PodAffinityTerm{ + { + LabelSelector: &metav1.LabelSelector{ + MatchLabels: map[string]string{ + "color": "purple", }, }, }, }, - } - - for _, c := range cases { - t.Run(fmt.Sprintf("verify affinities are set for %s: %s", c.kind, 
c.name), func(t *testing.T) { - helmOpts := baseHelmOptionsForEnterpriseWithAirbyteYml() // enables all the things - helmOpts.SetValues["metrics.enabled"] = "true" - //helmOpts.SetValues["workload-api-server.enabled"] = "true" -// helmOpts.SetValues["workload-launcher.enabled"] = "true" - - data, err := json.Marshal(affinity) - if err != nil { - t.Error(err) - } - helmOpts.SetJsonValues[c.valuesPath+".affinity"] = string(data) - - chartYaml, err := helm.RenderTemplateE(t, helmOpts, chartPath, "airbyte", nil) - assert.NotEmpty(t, chartYaml) - assert.NoError(t, err) - - switch c.kind { - case "Pod": - pod, err := getPod(chartYaml, c.name) - assert.NotNil(t, pod) - assert.NoError(t, err) - - assert.Equal(t, pod.Name, c.name) - assert.Equal(t, pod.Spec.Affinity, affinity) - case "Job": - job, err := getJob(chartYaml, c.name) - assert.NotNil(t, job) - assert.NoError(t, err) - - assert.Equal(t, job.Name, c.name) - assert.Equal(t, job.Spec.Template.Spec.Affinity, affinity) - case "Deployment": - dep, err := getDeployment(chartYaml, c.name) - assert.NotNil(t, dep) - assert.NoError(t, err) + }, + } - assert.Equal(t, dep.Name, c.name) - assert.Equal(t, dep.Spec.Template.Spec.Affinity, affinity) - case "StatefulSet": - ss, err := getStatefulSet(chartYaml, c.name) - assert.NotNil(t, ss) - assert.NoError(t, err) + affinityData, err := json.Marshal(expectAffinity) + if err != nil { + t.Fatal(err) + } - assert.Equal(t, ss.Name, c.name) - assert.Equal(t, ss.Spec.Template.Spec.Affinity, affinity) + for _, app := range allApps { + t.Run(app, func(t *testing.T) { + setAppOpt(opts, app, "nodeSelector.machineSize", "xlarge") + setAppOpt(opts, app, "nodeSelector.region", "us-west-2") + + setAppOpt(opts, app, "tolerations[0].key", "key1") + setAppOpt(opts, app, "tolerations[0].operator", "Equal") + setAppOpt(opts, app, "tolerations[0].value", "value1") + setAppOpt(opts, app, "tolerations[0].effect", "NoSchedule") + + setAppOpt(opts, app, "tolerations[1].key", "key2") + setAppOpt(opts, 
app, "tolerations[1].operator", "Equal") + setAppOpt(opts, app, "tolerations[1].value", "value2") + setAppOpt(opts, app, "tolerations[1].effect", "NoSchedule") + + opts.SetJsonValues[app+".affinity"] = string(affinityData) + + chartYaml := renderChart(t, opts) + spec := appPodSpec(chartYaml, app) + assert.Equal(t, expectSelector, spec.NodeSelector) + assert.Equal(t, expectTolerations, spec.Tolerations) + assert.Equal(t, expectAffinity, spec.Affinity) + }) + } +} - default: - t.Fatalf("unsupported resource kind: %s", c.kind) - } - }) - } - }) +func appPodSpec(chartYaml, appName string) corev1.PodSpec { + + // most resources follow this naming pattern + resourceName := "airbyte-" + appName + + switch appName { + case "pod-sweeper": + return getDeployment(chartYaml, "airbyte-pod-sweeper-pod-sweeper").Spec.Template.Spec + case "airbyte-bootloader": + return getPod(chartYaml, resourceName).Spec + case "keycloak-setup": + return getJob(chartYaml, resourceName).Spec.Template.Spec + case "keycloak", "db": + return getStatefulSet(chartYaml, resourceName).Spec.Template.Spec + default: + return getDeployment(chartYaml, resourceName).Spec.Template.Spec + } } diff --git a/charts/helm-tests/tests/utils.go b/charts/helm-tests/tests/utils.go deleted file mode 100644 index aeaafa87f27..00000000000 --- a/charts/helm-tests/tests/utils.go +++ /dev/null @@ -1,40 +0,0 @@ -package test - -type set[K comparable] map[K]struct{} - -func toStringSet(keys ...string) set[string] { - s := make(set[string]) - for _, k := range keys { - s[k] = struct{}{} - } - - return s -} - -func (s set[string]) keys() []string { - var ks []string - for k := range s { - ks = append(ks, k) - } - - return ks -} - -func (s set[string]) contains(k string) bool { - _, ok := s[k] - return ok -} - -func (s set[string]) union(other set[string]) set[string] { - ss := make(set[string]) - - for k := range s { - ss[k] = struct{}{} - } - - for k := range other { - ss[k] = struct{}{} - } - - return ss -} From 
c5baf02d3efccbeb424fd6b35980dd90c721536c Mon Sep 17 00:00:00 2001 From: Jonathan Pearlin Date: Mon, 30 Sep 2024 14:41:18 -0400 Subject: [PATCH 20/36] refactor: convert logging configuration to Logback (#14113) Co-authored-by: Cole Snodgrass --- airbyte-bootloader/build.gradle.kts | 6 + .../src/main/resources/application.yml | 10 +- airbyte-commons-server/build.gradle.kts | 9 +- .../errors/IdNotFoundKnownException.java | 5 +- .../commons/server/errors/KnownException.java | 14 +- .../InvalidInputExceptionHandler.java | 4 +- .../handlers/helpers/ConnectionMatcher.java | 8 +- .../handlers/helpers/DestinationMatcher.java | 6 +- .../handlers/helpers/SourceMatcher.java | 6 +- airbyte-commons-storage/build.gradle.kts | 3 + .../logback/AirbyteCloudStorageAppender.kt | 174 ++++++++ .../logback/AirbyteLogbackCustomConfigurer.kt | 405 ++++++++++++++++++ .../logging/logback/AirbyteLogbackUtils.kt | 39 ++ ...irbyteOperationsJobLogbackMessageLayout.kt | 47 ++ .../AirbytePlatformLogbackMessageLayout.kt | 126 ++++++ .../AirbyteStorageMDCBasedDiscriminator.kt | 21 + .../logging/logback/MaskedDataConverter.kt | 156 +++++++ .../airbyte/commons/storage/StorageClient.kt | 47 ++ .../ch.qos.logback.classic.spi.Configurator | 1 + .../AirbyteCloudStorageAppenderTest.kt | 192 +++++++++ .../AirbyteLogbackCustomConfigurerTest.kt | 213 +++++++++ ...teOperationsJobLogbackMessageLayoutTest.kt | 131 ++++++ ...AirbytePlatformLogbackMessageLayoutTest.kt | 224 ++++++++++ ...AirbyteStorageMDCBasedDiscriminatorTest.kt | 47 ++ .../logback/MaskedDataConverterTest.kt | 186 ++++++++ .../storage/StorageClientFactoryTest.kt | 24 +- .../commons/storage/StorageClientTest.kt | 132 +++++- .../test/resources/test_spec_secret_mask.yaml | 5 + airbyte-commons-worker/build.gradle.kts | 5 +- .../general/ReplicationWorkerTest.java | 6 + .../VersionedAirbyteStreamFactoryTest.java | 7 - .../io/airbyte/commons/envvar/EnvVar.kt | 12 +- .../io/airbyte/commons/envvar/EnvVarTest.kt | 17 + 
.../config-persistence/build.gradle.kts | 4 + .../build.gradle.kts | 6 + .../src/main/resources/application.yml | 11 +- .../build.gradle.kts | 7 + .../src/main/resources/application.yml | 14 +- .../build.gradle.kts | 9 + .../src/main/resources/application.properties | 1 - .../src/main/resources/application.yml | 14 +- airbyte-connector-sidecar/build.gradle.kts | 6 +- .../src/main/resources/application.yml | 11 + .../build.gradle.kts | 8 +- .../src/main/resources/application.yml | 11 + airbyte-cron/build.gradle.kts | 6 + .../src/main/resources/application.yml | 10 +- airbyte-data/build.gradle.kts | 5 + airbyte-featureflag-server/build.gradle.kts | 9 +- .../src/main/resources/application.yml | 11 + airbyte-keycloak-setup/build.gradle.kts | 7 + .../src/main/resources/application.yml | 11 + airbyte-metrics/reporter/build.gradle.kts | 6 + .../src/main/resources/application.yml | 11 +- airbyte-notification/build.gradle.kts | 6 +- .../notification/SlackNotificationClient.java | 8 +- .../job/tracker/TrackingMetadata.java | 8 +- airbyte-server/build.gradle.kts | 9 +- .../src/main/resources/application.yml | 6 + airbyte-workers/build.gradle.kts | 4 +- .../src/main/resources/application.yml | 11 +- airbyte-workload-api-server/build.gradle.kts | 7 +- .../src/main/resources/application.yml | 18 +- .../build.gradle.kts | 5 + .../src/main/resources/application.yml | 11 + airbyte-workload-launcher/build.gradle.kts | 8 +- .../src/main/resources/application.yml | 6 + deps.toml | 5 + 68 files changed, 2477 insertions(+), 101 deletions(-) create mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt create mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt create mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt create mode 100644 
airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt create mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt create mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt create mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt create mode 100644 airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator create mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt create mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt create mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt create mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt create mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt create mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt create mode 100644 airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml delete mode 100644 airbyte-connector-rollout-worker/src/main/resources/application.properties diff --git a/airbyte-bootloader/build.gradle.kts b/airbyte-bootloader/build.gradle.kts index 1ef19396d93..a64ecff2f4d 100644 --- a/airbyte-bootloader/build.gradle.kts +++ b/airbyte-bootloader/build.gradle.kts @@ -4,6 +4,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) 
annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -23,6 +27,7 @@ dependencies { implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-micronaut")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-config:init")) implementation(project(":oss:airbyte-config:specs")) implementation(project(":oss:airbyte-config:config-models")) @@ -37,6 +42,7 @@ dependencies { implementation(project(":oss:airbyte-persistence:job-persistence")) runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) diff --git a/airbyte-bootloader/src/main/resources/application.yml b/airbyte-bootloader/src/main/resources/application.yml index a41a655a056..c973bc04829 100644 --- a/airbyte-bootloader/src/main/resources/application.yml +++ b/airbyte-bootloader/src/main/resources/application.yml @@ -147,5 +147,11 @@ jooq: logger: levels: -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-commons-server/build.gradle.kts b/airbyte-commons-server/build.gradle.kts index b5f18a7d2e7..99890673170 100644 --- a/airbyte-commons-server/build.gradle.kts +++ b/airbyte-commons-server/build.gradle.kts @@ -3,6 +3,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -36,12 +40,13 @@ dependencies { implementation(libs.quartz.scheduler) implementation(libs.temporal.sdk) 
implementation(libs.swagger.annotations) - implementation(libs.bundles.log4j) implementation(libs.commons.io) + implementation(libs.apache.commons.lang) implementation(libs.kotlin.logging) implementation(libs.reactor.core) implementation(libs.jakarta.ws.rs.api) implementation(libs.kubernetes.client) + implementation(libs.guava) implementation(project(":oss:airbyte-analytics")) implementation(project(":oss:airbyte-api:connector-builder-api")) @@ -51,8 +56,8 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-converters")) implementation(project(":oss:airbyte-commons-license")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-protocol")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-with-dependencies")) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java index f5bcf007525..d5848ab1010 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java @@ -5,7 +5,6 @@ package io.airbyte.commons.server.errors; import io.airbyte.api.model.generated.NotFoundKnownExceptionInfo; -import org.apache.logging.log4j.core.util.Throwables; /** * Thrown when an api input requests an id that does not exist. 
@@ -46,10 +45,10 @@ public NotFoundKnownExceptionInfo getNotFoundKnownExceptionInfo() { final NotFoundKnownExceptionInfo exceptionInfo = new NotFoundKnownExceptionInfo() .exceptionClassName(this.getClass().getName()) .message(this.getMessage()) - .exceptionStack(Throwables.toStringList(this)); + .exceptionStack(getStackTraceAsList(this)); if (this.getCause() != null) { exceptionInfo.rootCauseExceptionClassName(this.getClass().getClass().getName()); - exceptionInfo.rootCauseExceptionStack(Throwables.toStringList(this.getCause())); + exceptionInfo.rootCauseExceptionStack(getStackTraceAsList(this.getCause())); } exceptionInfo.id(this.getId()); return exceptionInfo; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java index 4cffeee75d8..2d9db718b38 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java @@ -4,9 +4,12 @@ package io.airbyte.commons.server.errors; +import com.google.common.base.Throwables; import io.airbyte.api.model.generated.KnownExceptionInfo; +import java.util.List; import java.util.Map; -import org.apache.logging.log4j.core.util.Throwables; +import java.util.stream.Collectors; +import java.util.stream.Stream; /** * Exception wrapper to handle formatting API exception outputs nicely. 
@@ -45,6 +48,11 @@ public KnownExceptionInfo getKnownExceptionInfo() { return KnownException.infoFromThrowable(this, details); } + public static List getStackTraceAsList(final Throwable throwable) { + final String[] stackTrace = Throwables.getStackTraceAsString(throwable).split("\n"); + return Stream.of(stackTrace).collect(Collectors.toList()); + } + public static KnownExceptionInfo infoFromThrowableWithMessage(final Throwable t, final String message) { return infoFromThrowableWithMessage(t, message, null); // Call the other static method with null details } @@ -61,11 +69,11 @@ public static KnownExceptionInfo infoFromThrowableWithMessage(final Throwable t, final KnownExceptionInfo exceptionInfo = new KnownExceptionInfo() .exceptionClassName(t.getClass().getName()) .message(message) - .exceptionStack(Throwables.toStringList(t)); + .exceptionStack(getStackTraceAsList(t)); if (t.getCause() != null) { exceptionInfo.rootCauseExceptionClassName(t.getCause().getClass().getName()); - exceptionInfo.rootCauseExceptionStack(Throwables.toStringList(t.getCause())); + exceptionInfo.rootCauseExceptionStack(getStackTraceAsList(t.getCause())); } if (details != null) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java index e19e7ea88d4..b3e3f69a212 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java @@ -7,6 +7,7 @@ import io.airbyte.api.model.generated.InvalidInputExceptionInfo; import io.airbyte.api.model.generated.InvalidInputProperty; import io.airbyte.commons.json.Jsons; +import io.airbyte.commons.server.errors.KnownException; import io.micronaut.context.annotation.Replaces; import 
io.micronaut.context.annotation.Requires; import io.micronaut.http.HttpRequest; @@ -21,7 +22,6 @@ import jakarta.validation.ConstraintViolationException; import java.util.ArrayList; import java.util.List; -import org.apache.logging.log4j.core.util.Throwables; /** * https://www.baeldung.com/jersey-bean-validation#custom-exception-handler. handles exceptions @@ -53,7 +53,7 @@ public static InvalidInputExceptionInfo infoFromConstraints(final ConstraintViol final InvalidInputExceptionInfo exceptionInfo = new InvalidInputExceptionInfo() .exceptionClassName(cve.getClass().getName()) .message("Some properties contained invalid input.") - .exceptionStack(Throwables.toStringList(cve)); + .exceptionStack(KnownException.getStackTraceAsList(cve)); final List props = new ArrayList(); for (final ConstraintViolation cv : cve.getConstraintViolations()) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java index d56db348fde..73fa5e9bb42 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java @@ -6,7 +6,7 @@ import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.ConnectionSearch; -import org.apache.logging.log4j.util.Strings; +import org.apache.commons.lang3.StringUtils; /** * Constructs a query for finding a query. @@ -28,13 +28,13 @@ public ConnectionRead match(final ConnectionRead query) { final ConnectionRead fromSearch = new ConnectionRead(); fromSearch.connectionId(search.getConnectionId() == null ? query.getConnectionId() : search.getConnectionId()); fromSearch.destinationId(search.getDestinationId() == null ? 
query.getDestinationId() : search.getDestinationId()); - fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); - fromSearch.namespaceFormat(Strings.isBlank(search.getNamespaceFormat()) || "null".equals(search.getNamespaceFormat()) + fromSearch.name(StringUtils.isBlank(search.getName()) ? query.getName() : search.getName()); + fromSearch.namespaceFormat(StringUtils.isBlank(search.getNamespaceFormat()) || "null".equals(search.getNamespaceFormat()) ? query.getNamespaceFormat() : search.getNamespaceFormat()); fromSearch.namespaceDefinition( search.getNamespaceDefinition() == null ? query.getNamespaceDefinition() : search.getNamespaceDefinition()); - fromSearch.prefix(Strings.isBlank(search.getPrefix()) ? query.getPrefix() : search.getPrefix()); + fromSearch.prefix(StringUtils.isBlank(search.getPrefix()) ? query.getPrefix() : search.getPrefix()); fromSearch.schedule(search.getSchedule() == null ? query.getSchedule() : search.getSchedule()); fromSearch.scheduleType(search.getScheduleType() == null ? query.getScheduleType() : search.getScheduleType()); fromSearch.scheduleData(search.getScheduleData() == null ? 
query.getScheduleData() : search.getScheduleData()); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java index c59a24e8db9..fef914465a6 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.api.model.generated.DestinationRead; import io.airbyte.api.model.generated.DestinationSearch; -import org.apache.logging.log4j.util.Strings; +import org.apache.commons.lang3.StringUtils; /** * Constructs a query for finding a query. @@ -28,13 +28,13 @@ public DestinationRead match(final DestinationRead query) { } final DestinationRead fromSearch = new DestinationRead(); - fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); + fromSearch.name(StringUtils.isBlank(search.getName()) ? query.getName() : search.getName()); fromSearch.destinationDefinitionId(search.getDestinationDefinitionId() == null ? query.getDestinationDefinitionId() : search.getDestinationDefinitionId()); fromSearch .destinationId(search.getDestinationId() == null ? query.getDestinationId() : search.getDestinationId()); fromSearch.destinationName( - Strings.isBlank(search.getDestinationName()) ? query.getDestinationName() : search.getDestinationName()); + StringUtils.isBlank(search.getDestinationName()) ? query.getDestinationName() : search.getDestinationName()); fromSearch.workspaceId(search.getWorkspaceId() == null ? 
query.getWorkspaceId() : search.getWorkspaceId()); fromSearch.icon(query.getIcon()); fromSearch.isVersionOverrideApplied(query.getIsVersionOverrideApplied()); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java index 1dee4c333a2..cb212c5d5c4 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.api.model.generated.SourceRead; import io.airbyte.api.model.generated.SourceSearch; -import org.apache.logging.log4j.util.Strings; +import org.apache.commons.lang3.StringUtils; /** * Constructs a query for finding a query. @@ -28,10 +28,10 @@ public SourceRead match(final SourceRead query) { } final SourceRead fromSearch = new SourceRead(); - fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); + fromSearch.name(StringUtils.isBlank(search.getName()) ? query.getName() : search.getName()); fromSearch.sourceDefinitionId(search.getSourceDefinitionId() == null ? query.getSourceDefinitionId() : search.getSourceDefinitionId()); fromSearch.sourceId(search.getSourceId() == null ? query.getSourceId() : search.getSourceId()); - fromSearch.sourceName(Strings.isBlank(search.getSourceName()) ? query.getSourceName() : search.getSourceName()); + fromSearch.sourceName(StringUtils.isBlank(search.getSourceName()) ? query.getSourceName() : search.getSourceName()); fromSearch.workspaceId(search.getWorkspaceId() == null ? 
query.getWorkspaceId() : search.getWorkspaceId()); fromSearch.icon(query.getIcon()); fromSearch.isVersionOverrideApplied(query.getIsVersionOverrideApplied()); diff --git a/airbyte-commons-storage/build.gradle.kts b/airbyte-commons-storage/build.gradle.kts index 4fd322bddb7..ba858b28410 100644 --- a/airbyte-commons-storage/build.gradle.kts +++ b/airbyte-commons-storage/build.gradle.kts @@ -17,6 +17,8 @@ dependencies { api(libs.aws.java.sdk.sts) api(libs.s3) api(libs.google.cloud.storage) + api(libs.guava) + api(libs.slf4j.api) api(project(":oss:airbyte-commons")) api(project(":oss:airbyte-metrics:metrics-lib")) @@ -26,6 +28,7 @@ dependencies { // TODO: This is deprecated, but required to make the real van logging solution happy. implementation("com.microsoft.azure:azure-storage:8.6.6") implementation(libs.micronaut.inject) + implementation(libs.bundles.logback) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt new file mode 100644 index 00000000000..2794787f19f --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt @@ -0,0 +1,174 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.AppenderBase +import ch.qos.logback.core.encoder.Encoder +import com.google.common.util.concurrent.ThreadFactoryBuilder +import io.airbyte.commons.envvar.EnvVar +import io.airbyte.commons.storage.AzureStorageClient +import io.airbyte.commons.storage.AzureStorageConfig +import io.airbyte.commons.storage.DocumentType +import io.airbyte.commons.storage.GcsStorageClient +import io.airbyte.commons.storage.GcsStorageConfig +import io.airbyte.commons.storage.LocalStorageClient +import io.airbyte.commons.storage.LocalStorageConfig +import io.airbyte.commons.storage.MinioStorageClient +import io.airbyte.commons.storage.MinioStorageConfig +import io.airbyte.commons.storage.S3StorageClient +import io.airbyte.commons.storage.S3StorageConfig +import io.airbyte.commons.storage.StorageBucketConfig +import io.airbyte.commons.storage.StorageClient +import java.net.InetAddress +import java.time.LocalDateTime +import java.time.format.DateTimeFormatter +import java.util.UUID +import java.util.concurrent.Executors +import java.util.concurrent.LinkedBlockingQueue +import java.util.concurrent.TimeUnit + +/** + * Custom Logback [AppenderBase] that uploads log events to remove storage. Log data + * is uploaded on a scheduled cadence that produces a new remote storage file each time. + * This is necessary because most cloud storage systems do not support an append mode. 
+ */ +class AirbyteCloudStorageAppender( + val encoder: Encoder, + val baseStorageId: String, + val documentType: DocumentType, + val storageClient: StorageClient = buildStorageClient(storageConfig = buildStorageConfig(), documentType = documentType), + val period: Long = 60L, + val unit: TimeUnit = TimeUnit.SECONDS, +) : AppenderBase() { + private val buffer = LinkedBlockingQueue() + private val hostname = InetAddress.getLocalHost().hostName + private val executorService = + Executors.newScheduledThreadPool( + 1, + ThreadFactoryBuilder().setNameFormat("airbyte-cloud-storage-appender-%d").build(), + ) + private val uniqueIdentifier = UUID.randomUUID().toString().replace("-", "") + private var currentStorageId: String = composeId() + + override fun start() { + super.start() + executorService.scheduleAtFixedRate(this::upload, period, period, unit) + } + + override fun stop() { + try { + super.stop() + executorService.shutdownNow() + executorService.awaitTermination(30, TimeUnit.SECONDS) + } finally { + // Do one final upload attempt to make sure all logs are published + upload() + } + } + + override fun append(eventObject: ILoggingEvent) { + buffer.offer(encoder.encode(eventObject).decodeToString()) + } + + private fun upload() { + val messages = mutableListOf() + buffer.drainTo(messages) + + if (messages.isNotEmpty()) { + storageClient.write(id = currentStorageId, document = messages.joinToString(separator = "")) + + // Move to next file to avoid overwriting in log storage that doesn't support append mode + this.currentStorageId = composeId() + } + } + + private fun composeId(): String { + // Remove the trailing "/" from the base storage ID if present to avoid duplicates in the storage ID + return "${baseStorageId.trimEnd('/')}/${LocalDateTime.now().format(DATE_FORMAT)}_${hostname}_$uniqueIdentifier" + } +} + +internal fun buildStorageClient( + documentType: DocumentType, + storageConfig: Map, +): StorageClient { + val storageType = 
storageConfig[EnvVar.STORAGE_TYPE] ?: "" + val bucketConfig = buildBucketConfig(storageConfig = storageConfig) + + return when (storageType.lowercase()) { + "azure" -> + AzureStorageClient( + config = + AzureStorageConfig( + buckets = bucketConfig, + connectionString = storageConfig[EnvVar.AZURE_STORAGE_CONNECTION_STRING]!!, + ), + type = documentType, + ) + "gcs" -> + GcsStorageClient( + config = + GcsStorageConfig( + buckets = bucketConfig, + applicationCredentials = storageConfig[EnvVar.GOOGLE_APPLICATION_CREDENTIALS]!!, + ), + type = documentType, + ) + "minio" -> + MinioStorageClient( + config = + MinioStorageConfig( + buckets = bucketConfig, + accessKey = storageConfig[EnvVar.AWS_ACCESS_KEY_ID]!!, + secretAccessKey = storageConfig[EnvVar.AWS_SECRET_ACCESS_KEY]!!, + endpoint = storageConfig[EnvVar.MINIO_ENDPOINT]!!, + ), + type = documentType, + ) + "s3" -> + S3StorageClient( + config = + S3StorageConfig( + buckets = bucketConfig, + accessKey = storageConfig[EnvVar.AWS_ACCESS_KEY_ID]!!, + secretAccessKey = storageConfig[EnvVar.AWS_SECRET_ACCESS_KEY]!!, + region = storageConfig[EnvVar.AWS_DEFAULT_REGION]!!, + ), + type = documentType, + ) + else -> + LocalStorageClient( + config = + LocalStorageConfig( + buckets = bucketConfig, + root = "/tmp/local-storage", + ), + type = documentType, + ) + } +} + +private val DATE_FORMAT: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss") + +internal fun buildBucketConfig(storageConfig: Map): StorageBucketConfig = + StorageBucketConfig( + log = storageConfig[EnvVar.STORAGE_BUCKET_LOG] ?: throw IllegalArgumentException("Missing ${EnvVar.STORAGE_BUCKET_LOG.name} env-var"), + state = "", + workloadOutput = "", + activityPayload = "", + ) + +private fun buildStorageConfig(): Map = + mapOf( + EnvVar.STORAGE_TYPE to EnvVar.STORAGE_TYPE.fetchNotNull(), + EnvVar.STORAGE_BUCKET_LOG to EnvVar.STORAGE_BUCKET_LOG.fetchNotNull(), + EnvVar.AZURE_STORAGE_CONNECTION_STRING to 
EnvVar.AZURE_STORAGE_CONNECTION_STRING.fetchNotNull(), + EnvVar.GOOGLE_APPLICATION_CREDENTIALS to EnvVar.GOOGLE_APPLICATION_CREDENTIALS.fetchNotNull(), + EnvVar.AWS_ACCESS_KEY_ID to EnvVar.AWS_ACCESS_KEY_ID.fetchNotNull(), + EnvVar.AWS_SECRET_ACCESS_KEY to EnvVar.AWS_SECRET_ACCESS_KEY.fetchNotNull(), + EnvVar.AWS_DEFAULT_REGION to EnvVar.AWS_DEFAULT_REGION.fetchNotNull(), + EnvVar.MINIO_ENDPOINT to EnvVar.MINIO_ENDPOINT.fetchNotNull(), + ) diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt new file mode 100644 index 00000000000..cfe79a375c1 --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt @@ -0,0 +1,405 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.LoggerContext +import ch.qos.logback.classic.boolex.JaninoEventEvaluator +import ch.qos.logback.classic.sift.SiftingAppender +import ch.qos.logback.classic.spi.Configurator +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.Appender +import ch.qos.logback.core.ConsoleAppender +import ch.qos.logback.core.Context +import ch.qos.logback.core.FileAppender +import ch.qos.logback.core.Layout +import ch.qos.logback.core.boolex.EventEvaluator +import ch.qos.logback.core.encoder.Encoder +import ch.qos.logback.core.encoder.LayoutWrappingEncoder +import ch.qos.logback.core.filter.EvaluatorFilter +import ch.qos.logback.core.hook.DefaultShutdownHook +import ch.qos.logback.core.rolling.FixedWindowRollingPolicy +import ch.qos.logback.core.rolling.RollingFileAppender +import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy +import ch.qos.logback.core.sift.AppenderFactory +import 
ch.qos.logback.core.sift.Discriminator +import ch.qos.logback.core.spi.ContextAwareBase +import ch.qos.logback.core.spi.FilterReply +import ch.qos.logback.core.util.Duration +import ch.qos.logback.core.util.FileSize +import ch.qos.logback.core.util.StatusPrinter2 +import io.airbyte.commons.envvar.EnvVar +import io.airbyte.commons.logging.DEFAULT_CLOUD_JOB_LOG_PATH_MDC_KEY +import io.airbyte.commons.logging.DEFAULT_CLOUD_WORKSPACE_MDC_KEY +import io.airbyte.commons.logging.DEFAULT_JOB_LOG_PATH_MDC_KEY +import io.airbyte.commons.logging.DEFAULT_LOG_FILENAME +import io.airbyte.commons.logging.DEFAULT_WORKSPACE_MDC_KEY +import io.airbyte.commons.storage.DocumentType +import org.slf4j.Logger.ROOT_LOGGER_NAME +import java.io.File +import java.nio.file.Path +import kotlin.io.path.isDirectory + +/** + * Custom Logback [Configurator] that configures Logback appenders and loggers for use in the platform. This configurator allows us to + * dynamically control the output of each logger and apply any additional logic prior to logging the message. 
+ */ +class AirbyteLogbackCustomConfigurer : + ContextAwareBase(), + Configurator { + override fun configure(loggerContext: LoggerContext): Configurator.ExecutionStatus { + // Ensure that the logging context is stopped on application shutdown + registerShutdownHook(loggerContext = loggerContext) + + // Output any configuration errors + StatusPrinter2().printInCaseOfErrorsOrWarnings(loggerContext) + + // Create appenders + val appenders = + listOf( + createPlatformAppender(loggerContext = loggerContext), + createOperationsJobAppender(loggerContext = loggerContext), + createApplicationAppender(loggerContext = loggerContext), + createCloudApplicationAppender(loggerContext = loggerContext), + createCloudOperationsJobAppender(loggerContext = loggerContext), + ) + + // Register appenders with root logger + loggerContext.getLogger(ROOT_LOGGER_NAME).apply { + level = getLogLevel() + isAdditive = true + appenders.forEach { addAppender(it) } + } + + // Do not allow any other configurators to run after this. + // This prevents Logback from creating the default console appender for the root logger. + return Configurator.ExecutionStatus.DO_NOT_INVOKE_NEXT_IF_ANY + } + + /** + * Builds the appender for application log messages. This appender logs all messages to a rolling local file. + * + * @param loggerContext The logging context. + * @return The application appender. + */ + private fun createApplicationAppender(loggerContext: LoggerContext): Appender { + return createSiftingAppender( + appenderFactory = this::createApplicationRollingAppender, + appenderName = APPLICATION_LOGGER_NAME, + contextKey = DEFAULT_WORKSPACE_MDC_KEY, + loggerContext = loggerContext, + ) + } + + /** + * Builds a [RollingFileAppender] for application logs. + * + * @param context The logging context. + * @param discriminatorValue The discriminator value used to select this appender. + * @return A [RollingFileAppender] configured for the application logs. 
+ */ + internal fun createApplicationRollingAppender( + context: Context, + discriminatorValue: String, + ): Appender { + val baseFile = "$discriminatorValue/$DEFAULT_LOG_FILENAME" + + // Ensure that the file exists before logging + touchFile(file = baseFile) + + val appender = RollingFileAppender() + appender.context = context + appender.name = "$discriminatorValue-local" + appender.encoder = createEncoder(context = context, layout = AirbytePlatformLogbackMessageLayout()) + appender.file = baseFile + + val triggeringPolicy = SizeBasedTriggeringPolicy() + triggeringPolicy.context = context + triggeringPolicy.maxFileSize = FileSize.valueOf(DEFAULT_MAX_LOG_FILE_SIZE) + triggeringPolicy.start() + + val rollingPolicy = FixedWindowRollingPolicy() + rollingPolicy.context = context + rollingPolicy.fileNamePattern = baseFile.replace(LOG_FILE_EXTENSION, ROLLING_FILE_NAME_PATTERN) + rollingPolicy.maxIndex = 3 + rollingPolicy.setParent(appender) + rollingPolicy.start() + + appender.rollingPolicy = rollingPolicy + appender.triggeringPolicy = triggeringPolicy + appender.start() + return appender + } + + /** + * Builds the cloud appender for application log messages. This appender logs all messages to remote storage. + * + * @param loggerContext The logging context. + * @return The cloud application appender. 
+ */ + private fun createCloudApplicationAppender(loggerContext: LoggerContext): Appender { + val appenderFactory = { context: Context, discriminatorValue: String -> + createCloudAppender( + context = context, + discriminatorValue = discriminatorValue, + layout = AirbytePlatformLogbackMessageLayout(), + documentType = DocumentType.APPLICATION_LOGS, + appenderName = CLOUD_APPLICATION_LOGGER_NAME, + ) + } + + return createSiftingAppender( + appenderFactory = appenderFactory, + appenderName = CLOUD_APPLICATION_LOGGER_NAME, + contextKey = DEFAULT_CLOUD_WORKSPACE_MDC_KEY, + loggerContext = loggerContext, + ) + } + + /** + * Builds the appender for cloud operations job log messages. This appender logs all messages to remote storage. + * + * @param loggerContext The logging context. + * @return The cloud operations job appender. + */ + private fun createCloudOperationsJobAppender(loggerContext: LoggerContext): Appender { + val appenderFactory = { context: Context, discriminatorValue: String -> + createCloudAppender( + context = context, + discriminatorValue = discriminatorValue, + layout = AirbyteOperationsJobLogbackMessageLayout(), + documentType = DocumentType.LOGS, + appenderName = CLOUD_OPERATIONS_JOB_LOGGER_NAME, + ) + } + + return createSiftingAppender( + appenderFactory = appenderFactory, + appenderName = CLOUD_OPERATIONS_JOB_LOGGER_NAME, + contextKey = DEFAULT_CLOUD_JOB_LOG_PATH_MDC_KEY, + loggerContext = loggerContext, + ) + } + + /** + * Builds the appender for operations job log messages. This appender logs all messages to a local file. + * + * @param loggerContext The logging context. + * @return The operations job appender. 
+ */ + private fun createOperationsJobAppender(loggerContext: LoggerContext): Appender { + return createSiftingAppender( + appenderFactory = this::createOperationsJobFileAppender, + appenderName = OPERATIONS_JOB_LOGGER_NAME, + contextKey = DEFAULT_JOB_LOG_PATH_MDC_KEY, + loggerContext = loggerContext, + ) + } + + /** + * Builds the operations job file appender for operations job log messages. + * + * @param context The logging context. + * @param discriminatorValue The discriminator value used to select this appender. + * @return A [FileAppender] configured for the operations job logs. + */ + internal fun createOperationsJobFileAppender( + context: Context, + discriminatorValue: String, + ): Appender { + val filePath = + if (Path.of(discriminatorValue).isDirectory()) { + Path.of(discriminatorValue, DEFAULT_LOG_FILENAME) + } else { + Path.of(discriminatorValue) + } + + // Ensure that the log file exists + touchFile(file = filePath.toString()) + + val appender = FileAppender() + appender.context = context + appender.encoder = createEncoder(context = context, layout = AirbyteOperationsJobLogbackMessageLayout()) + appender.file = filePath.toString() + appender.name = "$discriminatorValue-local" + appender.start() + return appender + } + + /** + * Builds an [AirbyteCloudStorageAppender] for remote logging of log messages. + * + * @param context The logging context. + * @param discriminatorValue The discriminator value used to select this appender. + * @param documentType The remote storage [DocumentType]. + * @param appenderName The base appender name. + * @param layout The log message [Layout]. + * @return An [AirbyteCloudStorageAppender] used to store logs remotely. 
+ */ + internal fun createCloudAppender( + context: Context, + discriminatorValue: String, + documentType: DocumentType, + appenderName: String, + layout: Layout, + ): AirbyteCloudStorageAppender { + val appender = + AirbyteCloudStorageAppender( + encoder = createEncoder(context = context, layout = layout), + baseStorageId = discriminatorValue, + documentType = documentType, + ) + appender.context = context + appender.name = "$appenderName-$discriminatorValue" + appender.start() + return appender + } + + /** + * Builds the appender for platform log messages. This appender logs all messages to the console. + * + * @param loggerContext The logging context. + * @return The platform appender. + */ + internal fun createPlatformAppender(loggerContext: LoggerContext): ConsoleAppender = + ConsoleAppender().apply { + context = loggerContext + encoder = createEncoder(context = loggerContext, layout = AirbytePlatformLogbackMessageLayout()) + name = PLATFORM_LOGGER_NAME + start() + } + + /** + * Builds a [Discriminator] that is used to extract a value from the logging MDC. + * + * @param contextKey The key in the MDC that will be extracted if present and not blank. + * @param loggerContext The logging context. + * @return The [Discriminator]. + */ + private fun createDiscriminator( + contextKey: String, + loggerContext: LoggerContext, + ): Discriminator = + AirbyteStorageMDCBasedDiscriminator(mdcValueExtractor = { mdc -> mdc[contextKey] ?: "" }).apply { + context = loggerContext + start() + } + + /** + * Builds the [Encoder] used to format the logging event message. + * + * @param context The logging [Context]. + * @param layout The logging message [Layout] to be applied to the message. + * @return The [Encoder]. 
+ */ + private fun createEncoder( + context: Context, + layout: Layout, + ): Encoder { + layout.context = context + layout.start() + + return LayoutWrappingEncoder().apply { + this.context = context + this.layout = layout + } + } + + /** + * Builds an [EventEvaluator] that tests whether the MDC contains a non-blank value + * for the provided `contextKey`. This evaluator is used to avoid routing logging + * events to the [SiftingAppender] when the event does not contain the required MDC property. + * + * @param contextKey The key in the MDC to be checked. + * @param loggerContext The logging context. + * @return The [EventEvaluator] that checks the provided `contextKey` in the MDC. + */ + private fun createEvaluator( + contextKey: String, + loggerContext: LoggerContext, + ): EventEvaluator = + JaninoEventEvaluator().apply { + context = loggerContext + expression = """mdc.get("$contextKey") == null || mdc.get("$contextKey") == """"" + start() + } + + /** + * Builds an [EvaluatorFilter] that denys matching the logging event + * to the [SiftingAppender] if the provided [EventEvaluator] expression + * returns `true`. This is used to avoid routing events with missing MDC properties + * to the appender. + * + * @param evaluator An [EventEvaluator] to be used by the filter. + * @param loggerContext The logging context. + * @return An [EvaluatorFilter] that denies matches when the provided evaluator results in a `true` comparison. + */ + private fun createFilter( + evaluator: EventEvaluator, + loggerContext: LoggerContext, + ): EvaluatorFilter = + EvaluatorFilter().apply { + context = loggerContext + this.evaluator = evaluator + onMatch = FilterReply.DENY + onMismatch = FilterReply.NEUTRAL + start() + } + + /** + * Builds a [SiftingAppender] that is invoked when the provided `contextKey` is present + * in the MDC. Once created, the appender will expire after disuse to ensure proper cleanup. 
+ * + * @param appenderFactory An [AppenderFactory] used to create an appender when the logging event matches the provided filter. + * @param contextKey The key in the MDC that is used to filter logging events. + * @param appenderName The name to apply to the appender. + * @param loggerContext The logging context. + * @return A [SiftingAppender] that creates dynamic appenders based on the value returned by a [Discriminator]. + */ + internal fun createSiftingAppender( + appenderFactory: AppenderFactory, + contextKey: String, + appenderName: String, + loggerContext: LoggerContext, + ): SiftingAppender { + val discriminator = createDiscriminator(contextKey = contextKey, loggerContext = loggerContext) + val evaluator = createEvaluator(contextKey = contextKey, loggerContext = loggerContext) + val filter = createFilter(evaluator = evaluator, loggerContext = loggerContext) + + return SiftingAppender().apply { + setAppenderFactory(appenderFactory) + context = loggerContext + this.discriminator = discriminator + name = appenderName + timeout = Duration.valueOf("$APPENDER_TIMEOUT minutes") + addFilter(filter) + start() + } + } + + /** + * Registers a shutdown hook with the JVM to ensure that the logging context is stopped + * on JVM exit. This ensures that any active appender is stopped, allowing them to + * publish any pending logging events. + * + * @param loggerContext The logging context. 
+ */ + private fun registerShutdownHook(loggerContext: LoggerContext) { + val shutdownHook = DefaultShutdownHook().apply { context = loggerContext } + Runtime.getRuntime().addShutdownHook(Thread { shutdownHook.run() }) + } +} + +private const val DEFAULT_APPENDER_TIMEOUT_MIN = "15" +const val DEFAULT_MAX_LOG_FILE_SIZE = "100MB" +const val LOG_FILE_EXTENSION = ".log" +const val ROLLING_FILE_NAME_PATTERN = ".%i$LOG_FILE_EXTENSION.gz" +val APPENDER_TIMEOUT = EnvVar.LOG_IDLE_ROUTE_TTL.fetchNotNull(default = DEFAULT_APPENDER_TIMEOUT_MIN) + +private fun getLogLevel(): Level = Level.toLevel(EnvVar.LOG_LEVEL.fetchNotNull(default = Level.INFO.toString())) + +private fun touchFile(file: String) { + File(file).createNewFile() +} diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt new file mode 100644 index 00000000000..29b16709b53 --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt @@ -0,0 +1,39 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +/** + * The qualified class name of the calling code for logging purposes. This + * key should be added to the MDC when the point of logging is not the point + * in the code that should be recorded by the logger layout. + */ +const val CALLER_QUALIFIED_CLASS_NAME_PATTERN = "CALLER_FQCN" + +/** + * The line number of the calling code for logging purposes. This + * key should be added to the MDC when the point of logging is not the point + * in the code that should be recorded by the logger layout. + */ +const val CALLER_LINE_NUMBER_PATTERN = "CALLER_LINE_NUMBER" + +/** + * The method name of the calling code for logging purposes. 
This + * key should be added to the MDC when the point of logging is not the point + * in the code that should be recorded by the logger layout. + */ +const val CALLER_METHOD_NAME_PATTERN = "CALLER_METHOD_NAME" + +/** + * The thread name of the calling code for logging purposes. This + * key should be added to the MDC when the point of logging is not the point + * in the code that should be recorded by the logger layout. + */ +const val CALLER_THREAD_NAME_PATTERN = "CALLER_THREAD_NAME" + +const val APPLICATION_LOGGER_NAME = "airbyte-application-logger" +const val CLOUD_APPLICATION_LOGGER_NAME = "airbyte-cloud-application-logger" +const val CLOUD_OPERATIONS_JOB_LOGGER_NAME = "airbyte-cloud-operations-job-logger" +const val OPERATIONS_JOB_LOGGER_NAME = "airbyte-operations-job-logger" +const val PLATFORM_LOGGER_NAME = "airbyte-platform-logger" diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt new file mode 100644 index 00000000000..0b33ff8e465 --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.pattern.ThrowableProxyConverter +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR +import ch.qos.logback.core.LayoutBase +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.LoggingHelper.LOG_SOURCE_MDC_KEY +import java.time.Instant +import java.time.ZoneId +import java.time.format.DateTimeFormatter + +/** + * Custom Logback message layout that formats the message for operations job log messages. 
+ */ +class AirbyteOperationsJobLogbackMessageLayout : LayoutBase() { + private val throwableConverter = ThrowableProxyConverter() + private val maskedDataConverter = MaskedDataConverter() + + init { + throwableConverter.start() + } + + override fun doLayout(loggingEvent: ILoggingEvent): String { + val logSource = loggingEvent.mdcPropertyMap.getOrDefault(LOG_SOURCE_MDC_KEY, LoggingHelper.platformLogSource()) + + return buildString { + append( + Instant.ofEpochMilli(loggingEvent.timeStamp).atZone(UTC_ZONE_ID).format(EVENT_TIMESTAMP_FORMATTER), + ) + append(" ") + append("$logSource > ") + append(maskedDataConverter.convert(event = loggingEvent)) + loggingEvent.throwableProxy?.let { + append("$LINE_SEPARATOR${throwableConverter.convert(loggingEvent)}") + } + append(LINE_SEPARATOR) + } + } +} + +private val UTC_ZONE_ID = ZoneId.of("UTC") +private val EVENT_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt new file mode 100644 index 00000000000..a4b2b29aae3 --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt @@ -0,0 +1,126 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.pattern.ClassOfCallerConverter +import ch.qos.logback.classic.pattern.LineOfCallerConverter +import ch.qos.logback.classic.pattern.MethodOfCallerConverter +import ch.qos.logback.classic.pattern.ThreadConverter +import ch.qos.logback.classic.pattern.ThrowableProxyConverter +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.CoreConstants.DASH_CHAR +import ch.qos.logback.core.CoreConstants.ISO8601_PATTERN +import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR +import ch.qos.logback.core.CoreConstants.TAB +import ch.qos.logback.core.LayoutBase +import ch.qos.logback.core.pattern.color.ANSIConstants +import ch.qos.logback.core.pattern.color.ANSIConstants.DEFAULT_FG +import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_END +import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_START +import ch.qos.logback.core.pattern.color.ANSIConstants.RESET +import java.time.Instant +import java.time.ZoneId +import java.time.format.DateTimeFormatter + +/** + * Custom Logback message layout that formats the message for platform log messages (e.g. STDOUT). 
+ */ +class AirbytePlatformLogbackMessageLayout : LayoutBase() { + private val classOfCallerConverter = ClassOfCallerConverter() + private val lineOfCallerConverter = LineOfCallerConverter() + private val methodOfCallerConverter = MethodOfCallerConverter() + private val threadConverter = ThreadConverter() + private val throwableConverter = ThrowableProxyConverter() + private val maskedDataConverter = MaskedDataConverter() + private val ciMode = System.getProperty(CI_MODE_SYSTEM_PROPERTY, "false").toBoolean() + + init { + throwableConverter.start() + } + + override fun doLayout(loggingEvent: ILoggingEvent): String = + buildString { + append( + Instant.ofEpochMilli(loggingEvent.timeStamp).atZone(UTC_ZONE_ID).format(EVENT_TIMESTAMP_FORMATTER), + ) + + append(" ") + + /* + * Add DataDog trace/span ID's to log messages if CI mode is enabled and the log + * message is not for the job log. + */ + if (ciMode) { + append( + "[dd.trace_id=${loggingEvent.mdcPropertyMap[DATADOG_TRACE_ID_KEY]} " + + "dd.span_id=${loggingEvent.mdcPropertyMap[DATADOG_SPAN_ID_KEY]}] ", + ) + } + + append("[") + if (loggingEvent.mdcPropertyMap.containsKey(CALLER_THREAD_NAME_PATTERN)) { + append(loggingEvent.mdcPropertyMap[CALLER_THREAD_NAME_PATTERN]) + } else { + append(threadConverter.convert(loggingEvent)) + } + append("]$TAB") + append("$ESC_START${getHighlightColor(loggingEvent = loggingEvent)}$ESC_END${loggingEvent.level}$DEFAULT_COLOR$TAB") + if (loggingEvent.mdcPropertyMap.containsKey(CALLER_QUALIFIED_CLASS_NAME_PATTERN)) { + append( + "${formatClassName(loggingEvent.mdcPropertyMap[CALLER_QUALIFIED_CLASS_NAME_PATTERN])}" + + "(${loggingEvent.mdcPropertyMap[CALLER_METHOD_NAME_PATTERN]}):" + + "${loggingEvent.mdcPropertyMap[CALLER_LINE_NUMBER_PATTERN]} $DASH_CHAR ", + ) + } else { + append( + "${formatClassName(classOfCallerConverter.convert(loggingEvent))}(${methodOfCallerConverter.convert(loggingEvent)})" + + ":${lineOfCallerConverter.convert(loggingEvent)} $DASH_CHAR ", + ) + } + 
append(maskedDataConverter.convert(event = loggingEvent)) + loggingEvent.throwableProxy?.let { + append("$LINE_SEPARATOR${throwableConverter.convert(loggingEvent)}") + } + append(LINE_SEPARATOR) + } +} + +internal const val DEFAULT_COLOR = ESC_START + RESET + DEFAULT_FG + ESC_END +internal const val CI_MODE_SYSTEM_PROPERTY = "ciMode" +internal const val DATADOG_SPAN_ID_KEY = "dd.span_id" +internal const val DATADOG_TRACE_ID_KEY = "dd.trace_id" +private val EVENT_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern(ISO8601_PATTERN) +private val UTC_ZONE_ID = ZoneId.of("UTC") + +/** + * Formats the fully qualified class name to mimic the same behavior as the ``{1.}`` option + * in a Log4j pattern layout format string. + * + * @param className a fully qualified class name + * @returns The formatted fully qualified class name. + */ +internal fun formatClassName(className: String?): String? { + return className?.let { + val parts = className.split('.') + return "${parts.subList(0, parts.size - 1).joinToString(".") { s -> s.substring(0, 1) }}.${parts.last()}" + } +} + +/** + * Returns the appropriate highlight color based on the level associated with the provided logging event. + * This method is adapted from [ch.qos.logback.classic.pattern.color.HighlightingCompositeConverter] used + * by Logback to color levels in log output. + * + * @param loggingEvent The logging event that contains the log level. + * @return The ANSI color code associated with the log level. 
+ */ +private fun getHighlightColor(loggingEvent: ILoggingEvent): String = + when (loggingEvent.level.toInt()) { + Level.ERROR_INT -> ANSIConstants.BOLD + ANSIConstants.RED_FG + Level.WARN_INT -> ANSIConstants.RED_FG + Level.INFO_INT -> ANSIConstants.BLUE_FG + else -> DEFAULT_FG + } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt new file mode 100644 index 00000000000..fe88ddb8d64 --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt @@ -0,0 +1,21 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.sift.AbstractDiscriminator + +/** + * Custom Logback [ch.qos.logback.core.sift.Discriminator] implementation that uses the + * job log path MDC value as a discriminator for appender creation. + */ +class AirbyteStorageMDCBasedDiscriminator( + private val mdcValueExtractor: (Map) -> String, +) : AbstractDiscriminator() { + // Not implemented/not used. + override fun getKey(): String = "" + + override fun getDiscriminatingValue(event: ILoggingEvent): String = mdcValueExtractor(event.mdcPropertyMap) +} diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt new file mode 100644 index 00000000000..544ef7b328a --- /dev/null +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt @@ -0,0 +1,156 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.pattern.ClassicConverter +import ch.qos.logback.classic.spi.ILoggingEvent +import com.fasterxml.jackson.core.type.TypeReference +import io.airbyte.commons.constants.AirbyteCatalogConstants.LOCAL_SECRETS_MASKS_PATH +import io.airbyte.commons.constants.AirbyteSecretConstants +import io.airbyte.commons.yaml.Yamls +import java.nio.charset.Charset +import java.util.regex.Pattern + +/** + * Custom [ClassicConverter] used to intercept all log messages and mask any JSON + * properties in the message that match the list of maskable properties. + *

+ * The maskable properties file is generated by a Gradle task in the + * {@code :oss:airbyte-config:specs} project. The file is named {@code specs_secrets_mask.yaml} and + * is located in the {@code src/main/resources/seed} directory of the + * {@code :oss:airbyte-config:init} project. + */ +class MaskedDataConverter( + specMaskFile: String = LOCAL_SECRETS_MASKS_PATH, +) : ClassicConverter() { + private val pattern: Pattern? = buildPattern(specMaskFile = specMaskFile) + private val replacePattern = API_KEY_PATTERN.toPattern() + + override fun convert(event: ILoggingEvent): String = replace(message = applyMask(message = event.formattedMessage)) + + /** + * Applies the mask to the message, if necessary. + * + * @param message The log message. + * @return The possibly masked log message. + */ + private fun applyMask(message: String): String { + val piiScrubbedMessage = removeKnownPii(message) + return pattern?.matcher(piiScrubbedMessage)?.replaceAll(REPLACEMENT_PATTERN) ?: piiScrubbedMessage + } + + /** + * Builds the maskable property matching pattern. + * + * @param specMaskFile The spec mask file. + * @return The regular expression pattern used to find maskable properties. + */ + private fun buildPattern(specMaskFile: String): Pattern? = + getMaskableProperties(specMaskFile).takeIf { it.isNotEmpty() }?.let { + generatePattern(it).toPattern() + } + + /** + * Generates the property matching pattern string from the provided set of properties. + * + * @param properties The set of properties to match. + * @return The generated regular expression pattern used to match the maskable properties. + */ + private fun generatePattern(properties: Set): String = + buildString { + append(CASE_INSENSITIVE_FLAG) + append(PROPERTY_MATCHING_PATTERN_PREFIX) + append(properties.joinToString("|")) + append(PROPERTY_MATCHING_PATTERN_SUFFIX) + } + + /** + * Loads the maskable properties from the provided file. + * + * @param specMaskFile The spec mask file. 
+ * @return The set of maskable properties. + */ + private fun getMaskableProperties(specMaskFile: String): Set { + return runCatching { + val maskFileContents = + javaClass.getResourceAsStream(specMaskFile)?.readBytes()?.toString(Charset.defaultCharset()) + ?: return setOf() + + val properties: Map> = + Yamls.deserialize(maskFileContents, object : TypeReference>>() {}) + + properties.getOrDefault(PROPERTIES_KEY, setOf()) + }.getOrDefault(setOf()) + } + + /** + * Code-based implementation of the `replace(message){r, t}` macro in Logback/Log4j configuration. + * + * @param message The message to apply replacement to. + * @return The potentially modified message with any speciric patterns replaced. + */ + private fun replace(message: String): String = replacePattern.matcher(message).replaceAll(API_KEY_REPLACEMENT) +} + +/** + * Regular expression to match api keys in strings. Ported from previous Log4j2 configuration. + */ +private const val API_KEY_PATTERN = """apikey=[\w\-]*""" + +/** + * Replacement pattern for matches using the [API_KEY_PATTERN] regular expression. + */ +private const val API_KEY_REPLACEMENT = "apikey=${AirbyteSecretConstants.SECRETS_MASK}" + +/** + * Regular expression pattern flag that enables case in-sensitive matching. + */ +private const val CASE_INSENSITIVE_FLAG: String = "(?i)" + +// This is a little circuitous, but it gets the regex syntax highlighting in intelliJ to work. +private val DESTINATION_ERROR_PREFIX: String = """^(?.*destination.*\s+>\s+ERROR.+)""".toPattern().pattern() + +/** + * Regular expression replacement pattern for applying the mask to PII log messages. 
+ */ +private const val KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN: String = "\${destinationPrefix}\${messagePrefix}${AirbyteSecretConstants.SECRETS_MASK}" + +private val KNOWN_PII_PATTERNS: List = + listOf( + """$DESTINATION_ERROR_PREFIX(?Received\s+invalid\s+message:)(.+)$""".toPattern(), + """$DESTINATION_ERROR_PREFIX(?org\.jooq\.exception\.DataAccessException: SQL.+values\s+\()(.+)$""".toPattern(), + ) + +/** + * Name of the key in the mask YAML file that contains the list of maskable properties. + */ +private const val PROPERTIES_KEY: String = "properties" + +/** + * Regular expression pattern prefix for applying the mask to property values. + */ +private const val PROPERTY_MATCHING_PATTERN_PREFIX: String = """"(""" + +/** + * Regular expression pattern suffix for applying the mask to property values. + */ +private const val PROPERTY_MATCHING_PATTERN_SUFFIX: String = """)"\s*:\s*("(?:[^"\\]|\\.)*"|\[[^]\[]*]|\d+)""" + +/** + * Regular expression pattern used to replace a key/value property with a masked value while + * maintaining the property key/name. + */ +private const val REPLACEMENT_PATTERN: String = """"$1":"${AirbyteSecretConstants.SECRETS_MASK}"""" + +/** + * Removes known PII from the message. 
+ * + * @param message the log line + * @return a redacted log line + */ +private fun removeKnownPii(message: String): String = + KNOWN_PII_PATTERNS.fold(message) { msg, pattern -> + pattern.matcher(msg).replaceAll(KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN) + } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt index 10118b5e14b..b7a79e944b1 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt @@ -9,6 +9,7 @@ import com.azure.storage.blob.BlobServiceClientBuilder import com.google.auth.oauth2.ServiceAccountCredentials import com.google.cloud.storage.BlobId import com.google.cloud.storage.BlobInfo +import com.google.cloud.storage.BucketInfo import com.google.cloud.storage.Storage import com.google.cloud.storage.StorageOptions import com.google.common.annotations.VisibleForTesting @@ -24,8 +25,10 @@ import software.amazon.awssdk.auth.credentials.AwsBasicCredentials import software.amazon.awssdk.core.sync.RequestBody import software.amazon.awssdk.regions.Region import software.amazon.awssdk.services.s3.S3Client +import software.amazon.awssdk.services.s3.model.CreateBucketRequest import software.amazon.awssdk.services.s3.model.DeleteObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectRequest +import software.amazon.awssdk.services.s3.model.HeadBucketRequest import software.amazon.awssdk.services.s3.model.HeadObjectRequest import software.amazon.awssdk.services.s3.model.NoSuchKeyException import software.amazon.awssdk.services.s3.model.PutObjectRequest @@ -68,6 +71,7 @@ class StorageClientFactory( enum class DocumentType( val prefix: Path, ) { + APPLICATION_LOGS(prefix = Path.of("/app-logging")), LOGS(prefix = Path.of("/job-logging")), STATE(prefix = Path.of("/state")), 
WORKLOAD_OUTPUT(prefix = Path.of("/workload/output")), @@ -128,6 +132,10 @@ class AzureStorageClient( @Parameter type: DocumentType, ) : this(config = config, type = type, azureClient = config.azureClient()) + init { + runCatching { createBucketIfNotExists() } + } + override fun write( id: String, document: String, @@ -154,6 +162,13 @@ class AzureStorageClient( .deleteIfExists() internal fun key(id: String): String = "${type.prefix}/$id" + + private fun createBucketIfNotExists() { + val blobContainerClient = azureClient.getBlobContainerClient(bucketName) + if (!blobContainerClient.exists()) { + blobContainerClient.createIfNotExists() + } + } } /** @@ -177,6 +192,10 @@ class GcsStorageClient( @Parameter type: DocumentType, ) : this(config = config, type = type, gcsClient = config.gcsClient()) + init { + runCatching { createBucketIfNotExists() } + } + override fun write( id: String, document: String, @@ -200,6 +219,12 @@ class GcsStorageClient( @VisibleForTesting internal fun blobId(id: String): BlobId = BlobId.of(bucketName, key(id)) + + private fun createBucketIfNotExists() { + if (gcsClient.get(bucketName) == null) { + gcsClient.create(BucketInfo.of(bucketName)) + } + } } /** @@ -294,6 +319,10 @@ abstract class AbstractS3StorageClient internal constructor( ) : StorageClient { private val bucketName = config.bucketName(type) + init { + runCatching { createBucketIfNotExists() } + } + override fun write( id: String, document: String, @@ -348,6 +377,23 @@ abstract class AbstractS3StorageClient internal constructor( } internal fun key(id: String): String = "${type.prefix}/$id" + + private fun createBucketIfNotExists() { + if (!doesBucketExist(bucketName=bucketName)) { + val createBucketRequest = CreateBucketRequest.builder().bucket(bucketName).build() + s3Client.createBucket(createBucketRequest) + } + } + + private fun doesBucketExist(bucketName: String): Boolean { + val headBucketRequest = HeadBucketRequest.builder().bucket(bucketName).build() + return try { + 
s3Client.headBucket(headBucketRequest) + true + } catch (e: Exception) { + false + } + } } /** @@ -417,6 +463,7 @@ fun StorageConfig.bucketName(type: DocumentType): String = when (type) { DocumentType.STATE -> this.buckets.state DocumentType.WORKLOAD_OUTPUT -> this.buckets.workloadOutput + DocumentType.APPLICATION_LOGS -> this.buckets.log DocumentType.LOGS -> this.buckets.log DocumentType.ACTIVITY_PAYLOADS -> this.buckets.activityPayload } diff --git a/airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator b/airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator new file mode 100644 index 00000000000..2ed89deda99 --- /dev/null +++ b/airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator @@ -0,0 +1 @@ +io.airbyte.commons.logging.logback.AirbyteLogbackCustomConfigurer \ No newline at end of file diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt new file mode 100644 index 00000000000..1e31a1d54dd --- /dev/null +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt @@ -0,0 +1,192 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.Context +import ch.qos.logback.core.encoder.Encoder +import ch.qos.logback.core.status.Status +import ch.qos.logback.core.status.StatusManager +import io.airbyte.commons.envvar.EnvVar +import io.airbyte.commons.resources.MoreResources +import io.airbyte.commons.storage.AzureStorageClient +import io.airbyte.commons.storage.DocumentType +import io.airbyte.commons.storage.GcsStorageClient +import io.airbyte.commons.storage.LocalStorageClient +import io.airbyte.commons.storage.MinioStorageClient +import io.airbyte.commons.storage.S3StorageClient +import io.airbyte.commons.storage.StorageClient +import io.mockk.every +import io.mockk.mockk +import io.mockk.verify +import org.junit.jupiter.api.AfterEach +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.nio.file.Files +import java.util.concurrent.TimeUnit +import kotlin.io.path.Path + +private class AirbyteCloudStorageAppenderTest { + @AfterEach + fun tearDown() { + Files.newDirectoryStream(Path("."), "*.log").use { stream -> + stream.forEach { Files.deleteIfExists(it) } + } + } + + @Test + fun testBuildBucketConfig() { + val bucket = "test-bucket" + val storageConfig = + mapOf( + EnvVar.STORAGE_BUCKET_LOG to bucket, + ) + val bucketConfig = buildBucketConfig(storageConfig) + assertEquals(bucket, bucketConfig.log) + assertEquals("", bucketConfig.state) + assertEquals("", bucketConfig.workloadOutput) + assertEquals("", bucketConfig.activityPayload) + } + + @Test + fun testBuildAzureStorageClient() { + val bucket = "test-bucket" + val connectionString = "AccountName=test;AccountKey=test-key" + val storageType = "azure" + val storageConfig = + mapOf( + EnvVar.STORAGE_TYPE to storageType, + EnvVar.STORAGE_BUCKET_LOG to bucket, + EnvVar.AZURE_STORAGE_CONNECTION_STRING to connectionString, + ) + val client = buildStorageClient(storageConfig = 
storageConfig, documentType = DocumentType.LOGS) + assertEquals(AzureStorageClient::class.java, client.javaClass) + } + + @Test + fun testBuildGcsStorageClient() { + val bucket = "test-bucket" + val applicationCredentials = MoreResources.readResourceAsFile("sample_gcs_credentials.json") + val credentials = applicationCredentials.path + val storageType = "gcs" + val storageConfig = + mapOf( + EnvVar.STORAGE_TYPE to storageType, + EnvVar.STORAGE_BUCKET_LOG to bucket, + EnvVar.GOOGLE_APPLICATION_CREDENTIALS to credentials, + ) + val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) + assertEquals(GcsStorageClient::class.java, client.javaClass) + } + + @Test + fun testBuildMinioStorageClient() { + val bucket = "test-bucket" + val accessKey = "test_access_key" + val accessSecretKey = "test_access_secret_key" + val endpoint = "test-endpoint:9000" + val storageType = "minio" + val storageConfig = + mapOf( + EnvVar.STORAGE_TYPE to storageType, + EnvVar.STORAGE_BUCKET_LOG to bucket, + EnvVar.AWS_ACCESS_KEY_ID to accessKey, + EnvVar.AWS_SECRET_ACCESS_KEY to accessSecretKey, + EnvVar.MINIO_ENDPOINT to endpoint, + ) + val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) + assertEquals(MinioStorageClient::class.java, client.javaClass) + } + + @Test + fun testBuildS3StorageClient() { + val bucket = "test-bucket" + val accessKey = "test_access_key" + val accessSecretKey = "test_access_secret_key" + val region = "US-EAST-1" + val storageType = "s3" + val storageConfig = + mapOf( + EnvVar.STORAGE_TYPE to storageType, + EnvVar.STORAGE_BUCKET_LOG to bucket, + EnvVar.AWS_ACCESS_KEY_ID to accessKey, + EnvVar.AWS_SECRET_ACCESS_KEY to accessSecretKey, + EnvVar.AWS_DEFAULT_REGION to region, + ) + val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) + assertEquals(S3StorageClient::class.java, client.javaClass) + } + + @Test + fun testBuildLocalStorageClient() { 
+ val bucket = "test-bucket" + val storageType = "local" + val storageConfig = + mapOf( + EnvVar.STORAGE_TYPE to storageType, + EnvVar.STORAGE_BUCKET_LOG to bucket, + ) + val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) + assertEquals(LocalStorageClient::class.java, client.javaClass) + } + + @Test + fun testBuildDefaultStorageClient() { + val bucket = "test-bucket" + val storageType = "unknown" + val storageConfig = + mapOf( + EnvVar.STORAGE_TYPE to storageType, + EnvVar.STORAGE_BUCKET_LOG to bucket, + ) + val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) + assertEquals(LocalStorageClient::class.java, client.javaClass) + + val storageConfig2 = + mapOf( + EnvVar.STORAGE_BUCKET_LOG to bucket, + ) + val client2 = buildStorageClient(storageConfig = storageConfig2, documentType = DocumentType.LOGS) + assertEquals(LocalStorageClient::class.java, client2.javaClass) + } + + @Test + fun testStorageUpload() { + val baseStorageId = "/path/to/logs" + val storageClient = mockk() + val event = mockk() + val period = 1L + val statusManager = + mockk { + every { add(any()) } returns Unit + } + val context = + mockk { + every { getStatusManager() } returns statusManager + } + val encoder = + mockk> { + every { encode(any()) } returns "some test log message".toByteArray(Charsets.UTF_8) + } + + val appender = + AirbyteCloudStorageAppender( + documentType = DocumentType.LOGS, + storageClient = storageClient, + baseStorageId = baseStorageId, + encoder = encoder, + period = period, + unit = TimeUnit.SECONDS, + ) + appender.context = context + appender.start() + + appender.doAppend(event) + + Thread.sleep(TimeUnit.SECONDS.toMillis(period * 2)) + + verify(exactly = 1) { storageClient.write(any(), any()) } + } +} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt 
b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt new file mode 100644 index 00000000000..baa8004f2f6 --- /dev/null +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt @@ -0,0 +1,213 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.LoggerContext +import ch.qos.logback.classic.sift.SiftingAppender +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.core.Context +import ch.qos.logback.core.FileAppender +import ch.qos.logback.core.OutputStreamAppender +import ch.qos.logback.core.encoder.LayoutWrappingEncoder +import ch.qos.logback.core.rolling.FixedWindowRollingPolicy +import ch.qos.logback.core.rolling.RollingFileAppender +import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy +import ch.qos.logback.core.sift.AppenderFactory +import ch.qos.logback.core.status.Status +import ch.qos.logback.core.status.StatusManager +import ch.qos.logback.core.util.Duration +import ch.qos.logback.core.util.FileSize +import io.airbyte.commons.logging.DEFAULT_LOG_FILENAME +import io.airbyte.commons.storage.DocumentType +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.nio.file.Files +import java.nio.file.Path +import kotlin.io.path.exists +import kotlin.io.path.pathString + +private class AirbyteLogbackCustomConfigurerTest { + private lateinit var configurer: AirbyteLogbackCustomConfigurer + + @BeforeEach + fun setUp() { + configurer = AirbyteLogbackCustomConfigurer() + } + + @Test + fun testCreateApplicationRollingAppender() { + val context = + mockk { + every { getObject(any()) } returns mutableMapOf() + every { statusManager } returns + 
mockk { + every { add(any()) } returns Unit + } + } + val discriminatorValue = Files.createTempDirectory("test-1").pathString + val appender = configurer.createApplicationRollingAppender(context = context, discriminatorValue = discriminatorValue) + + assertEquals(RollingFileAppender::class.java, appender.javaClass) + assertEquals(context, appender.context) + assertEquals("$discriminatorValue-local", appender.name) + assertEquals( + AirbytePlatformLogbackMessageLayout::class.java, + ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, + ) + assertEquals("$discriminatorValue/$DEFAULT_LOG_FILENAME", (appender as RollingFileAppender).file) + + assertEquals(FixedWindowRollingPolicy::class.java, appender.rollingPolicy.javaClass) + assertEquals( + "$discriminatorValue/$DEFAULT_LOG_FILENAME".replace(LOG_FILE_EXTENSION, ROLLING_FILE_NAME_PATTERN), + (appender.rollingPolicy as FixedWindowRollingPolicy).fileNamePattern, + ) + assertEquals(3, (appender.rollingPolicy as FixedWindowRollingPolicy).maxIndex) + + assertEquals(SizeBasedTriggeringPolicy::class.java, appender.triggeringPolicy.javaClass) + assertEquals(FileSize.valueOf(DEFAULT_MAX_LOG_FILE_SIZE), (appender.triggeringPolicy as SizeBasedTriggeringPolicy).maxFileSize) + + assertTrue(appender.isStarted) + assertTrue(Path.of(appender.file).exists()) + } + + @Test + fun testCreateOperationsJobFileAppender() { + val context = + mockk { + every { getObject(any()) } returns mutableMapOf() + every { statusManager } returns + mockk { + every { add(any()) } returns Unit + } + } + val discriminatorValue = Files.createTempDirectory("test-2").pathString + val appender = configurer.createOperationsJobFileAppender(context = context, discriminatorValue = discriminatorValue) + + assertEquals(FileAppender::class.java, appender.javaClass) + assertEquals(context, appender.context) + assertEquals("$discriminatorValue/$DEFAULT_LOG_FILENAME", (appender as FileAppender).file) + 
assertEquals("$discriminatorValue-local", appender.name) + assertEquals( + AirbyteOperationsJobLogbackMessageLayout::class.java, + ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, + ) + + assertTrue(appender.isStarted) + assertTrue(Path.of(appender.file).exists()) + } + + @Test + fun testCreateOperationsJobFileAppenderWithFileDiscriminator() { + val context = + mockk { + every { getObject(any()) } returns mutableMapOf() + every { statusManager } returns + mockk { + every { add(any()) } returns Unit + } + } + val discriminatorValue = Files.createTempFile("test-2", "other.log").pathString + val appender = configurer.createOperationsJobFileAppender(context = context, discriminatorValue = discriminatorValue) + + assertEquals(FileAppender::class.java, appender.javaClass) + assertEquals(context, appender.context) + assertEquals(discriminatorValue, (appender as FileAppender).file) + assertEquals("$discriminatorValue-local", appender.name) + assertEquals( + AirbyteOperationsJobLogbackMessageLayout::class.java, + ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, + ) + + assertTrue(appender.isStarted) + assertTrue(Path.of(appender.file).exists()) + } + + @Test + fun testCreatePlatformConsoleAppender() { + val context = + mockk { + every { getObject(any()) } returns mutableMapOf() + every { statusManager } returns + mockk { + every { add(any()) } returns Unit + } + } + val appender = configurer.createPlatformAppender(loggerContext = context) + assertEquals(context, appender.context) + assertEquals(PLATFORM_LOGGER_NAME, appender.name) + assertEquals( + AirbytePlatformLogbackMessageLayout::class.java, + ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, + ) + + assertTrue(appender.isStarted) + } + + @Test + fun testCreateAirbyteCloudStorageAppender() { + val context = + mockk { + every { getObject(any()) } returns mutableMapOf() + every { statusManager } returns + 
mockk { + every { add(any()) } returns Unit + } + } + val appenderName = "test-appender" + val discriminatorValue = "/workspace/1" + val documentType = DocumentType.LOGS + val layout = AirbytePlatformLogbackMessageLayout() + val appender = + configurer.createCloudAppender( + context = context, + discriminatorValue = discriminatorValue, + documentType = documentType, + appenderName = appenderName, + layout = layout, + ) + + assertEquals(AirbyteCloudStorageAppender::class.java, appender.javaClass) + assertEquals(context, appender.context) + assertEquals("$appenderName-$discriminatorValue", appender.name) + assertEquals(documentType, appender.documentType) + assertEquals(layout.javaClass, (appender.encoder as LayoutWrappingEncoder).layout.javaClass) + assertEquals(discriminatorValue, appender.baseStorageId) + + assertTrue(appender.isStarted) + } + + @Test + fun testCreateSiftingAppender() { + val loggerContext = + mockk { + every { getObject(any()) } returns mutableMapOf() + every { statusManager } returns + mockk { + every { add(any()) } returns Unit + } + } + val appenderFactory = mockk>() + val appenderName = "test-appender" + val contextKey = "test-context-key" + val appender = + configurer.createSiftingAppender( + appenderFactory = appenderFactory, + contextKey = contextKey, + appenderName = appenderName, + loggerContext = loggerContext, + ) + + assertEquals(SiftingAppender::class.java, appender.javaClass) + assertEquals(loggerContext, appender.context) + assertEquals(appenderName, appender.name) + assertEquals(Duration.valueOf("$APPENDER_TIMEOUT minutes").milliseconds, appender.timeout.milliseconds) + + assertTrue(appender.isStarted) + } +} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt new file mode 100644 index 00000000000..e2db0c4316d --- /dev/null 
+++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt @@ -0,0 +1,131 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.pattern.ThrowableProxyConverter +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.classic.spi.ThrowableProxy +import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR +import io.airbyte.commons.constants.AirbyteSecretConstants.SECRETS_MASK +import io.airbyte.commons.logging.LoggingHelper +import io.airbyte.commons.logging.LoggingHelper.LOG_SOURCE_MDC_KEY +import io.airbyte.commons.logging.LoggingHelper.SOURCE_LOGGER_PREFIX +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Test +import java.util.UUID + +private class AirbyteOperationsJobLogbackMessageLayoutTest { + @Test + fun testLogMessage() { + val logSource = LoggingHelper.applyColor(LoggingHelper.Color.BLUE_BACKGROUND, SOURCE_LOGGER_PREFIX) + val context = mapOf(LOG_SOURCE_MDC_KEY to logSource) + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val logMessage = "test message" + val logThreadName = "Test Thread" + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns OPERATIONS_JOB_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns null + every { timeStamp } returns timestamp + } + + val layout = AirbyteOperationsJobLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) 
+ + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00 ") + expected.append(logSource) + expected.append(" > $logMessage$LINE_SEPARATOR") + assertEquals(expected.toString(), message) + } + + @Test + fun testLogMessageWithMaskedData() { + val logSource = LoggingHelper.applyColor(LoggingHelper.Color.BLUE_BACKGROUND, SOURCE_LOGGER_PREFIX) + val context = mapOf(LOG_SOURCE_MDC_KEY to logSource) + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val apiKey = UUID.randomUUID().toString() + val logMessage = "test message (\"api_token\":\"$apiKey\")" + val logThreadName = "Test Thread" + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns OPERATIONS_JOB_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns null + every { timeStamp } returns timestamp + } + + val layout = AirbyteOperationsJobLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00 ") + expected.append(logSource) + expected.append(" > ${logMessage.replace(apiKey, SECRETS_MASK)}$LINE_SEPARATOR") + assertEquals(expected.toString(), message) + } + + @Test + fun testLogMessageWithException() { + val throwableConverter = ThrowableProxyConverter() + throwableConverter.start() + + val logSource = LoggingHelper.applyColor(LoggingHelper.Color.BLUE_BACKGROUND, SOURCE_LOGGER_PREFIX) + val context = mapOf(LOG_SOURCE_MDC_KEY to logSource) + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val logMessage = "test 
message" + val logThreadName = "Test Thread" + val timestamp = 0L + val exception = RuntimeException("test", NullPointerException("root")) + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns OPERATIONS_JOB_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns ThrowableProxy(exception) + every { timeStamp } returns timestamp + } + + val layout = AirbyteOperationsJobLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00 ") + expected.append(logSource) + expected.append(" > $logMessage") + expected.append("$LINE_SEPARATOR${throwableConverter.convert(event)}") + expected.append(LINE_SEPARATOR) + assertEquals(expected.toString(), message) + } +} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt new file mode 100644 index 00000000000..c91336931eb --- /dev/null +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt @@ -0,0 +1,224 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
+ */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.Level +import ch.qos.logback.classic.pattern.ThrowableProxyConverter +import ch.qos.logback.classic.spi.ILoggingEvent +import ch.qos.logback.classic.spi.ThrowableProxy +import ch.qos.logback.core.CoreConstants.DASH_CHAR +import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR +import ch.qos.logback.core.CoreConstants.TAB +import ch.qos.logback.core.pattern.color.ANSIConstants +import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_END +import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_START +import io.airbyte.commons.constants.AirbyteSecretConstants.SECRETS_MASK +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.UUID + +private class AirbytePlatformLogbackMessageLayoutTest { + @BeforeEach + fun setup() { + System.setProperty(CI_MODE_SYSTEM_PROPERTY, "false") + } + + @Test + fun testCiModeLogMessage() { + System.setProperty(CI_MODE_SYSTEM_PROPERTY, "true") + val spanId = UUID.randomUUID().toString() + val traceId = UUID.randomUUID().toString() + val context = + mapOf( + DATADOG_SPAN_ID_KEY to spanId, + DATADOG_TRACE_ID_KEY to traceId, + ) + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val logMessage = "test message" + val logThreadName = "Test Thread" + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns PLATFORM_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns null + every { timeStamp } returns timestamp + } + + val layout = 
AirbytePlatformLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00,000 [dd.trace_id=$traceId dd.span_id=$spanId] ") + expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") + expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") + expected.append("$logMessage$LINE_SEPARATOR") + assertEquals(expected.toString(), message) + } + + @Test + fun tesLogMessage() { + val context = emptyMap() + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val logMessage = "test message" + val logThreadName = "Test Thread" + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns PLATFORM_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns null + every { timeStamp } returns timestamp + } + + val layout = AirbytePlatformLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00,000 ") + expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") + expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") + expected.append("$logMessage$LINE_SEPARATOR") + assertEquals(expected.toString(), message) + } + + @Test + fun tesLogMessageWithMaskedData() { + val context = emptyMap() + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val apiKey = 
UUID.randomUUID().toString() + val logMessage = "test message (\"api_token\":\"$apiKey\")" + val logThreadName = "Test Thread" + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns PLATFORM_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns null + every { timeStamp } returns timestamp + } + + val layout = AirbytePlatformLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00,000 ") + expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") + expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") + expected.append("${logMessage.replace(apiKey, SECRETS_MASK)}$LINE_SEPARATOR") + assertEquals(expected.toString(), message) + } + + @Test + fun tesLogMessageWithException() { + val throwableConverter = ThrowableProxyConverter() + throwableConverter.start() + + val context = emptyMap() + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.ERROR + val logMessage = "test message" + val logThreadName = "Test Thread" + val exception = RuntimeException("test", NullPointerException("root")) + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns PLATFORM_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns ThrowableProxy(exception) + 
every { timeStamp } returns timestamp + } + + val layout = AirbytePlatformLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00,000 ") + expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BOLD + ANSIConstants.RED_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") + expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") + expected.append(logMessage) + expected.append("$LINE_SEPARATOR${throwableConverter.convert(event)}") + expected.append(LINE_SEPARATOR) + assertEquals(expected.toString(), message) + } + + @Test + fun tesLogMessageWithCallerContext() { + val callerClassName = "io.airbyte.CallerTestClass" + val callerMethodName = "callerTestMethod" + val callerLineNumber = "999" + val callerThreadName = "Caller Test Thread" + val context = + mapOf( + CALLER_QUALIFIED_CLASS_NAME_PATTERN to callerClassName, + CALLER_METHOD_NAME_PATTERN to callerMethodName, + CALLER_LINE_NUMBER_PATTERN to callerLineNumber, + CALLER_THREAD_NAME_PATTERN to callerThreadName, + ) + val className = "io.airbyte.TestClass" + val methodName = "testMethod" + val fileName = "TestClass.kt" + val lineNumber = 12345 + val logLevel = Level.INFO + val logMessage = "test message" + val logThreadName = "Test Thread" + val timestamp = 0L + val event = + mockk { + every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) + every { formattedMessage } returns logMessage + every { level } returns logLevel + every { loggerName } returns PLATFORM_LOGGER_NAME + every { mdcPropertyMap } returns context + every { threadName } returns logThreadName + every { throwableProxy } returns null + every { timeStamp } returns timestamp + } + + val layout = AirbytePlatformLogbackMessageLayout() + val message = layout.doLayout(loggingEvent = event) + + val expected = StringBuilder() + expected.append("1970-01-01 00:00:00,000 ") + 
expected.append("[$callerThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") + expected.append("${formatClassName(callerClassName)}($callerMethodName):$callerLineNumber $DASH_CHAR ") + expected.append("$logMessage$LINE_SEPARATOR") + assertEquals(expected.toString(), message) + } +} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt new file mode 100644 index 00000000000..9ca44ca52dd --- /dev/null +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt @@ -0,0 +1,47 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.spi.ILoggingEvent +import io.airbyte.commons.logging.DEFAULT_JOB_LOG_PATH_MDC_KEY +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +private class AirbyteStorageMDCBasedDiscriminatorTest { + private lateinit var discriminator: AirbyteStorageMDCBasedDiscriminator + + @BeforeEach + fun setup() { + discriminator = AirbyteStorageMDCBasedDiscriminator(mdcValueExtractor = { mdc -> mdc[DEFAULT_JOB_LOG_PATH_MDC_KEY] ?: "" }) + } + + @Test + fun testLoggingEventWithPathInContext() { + val jobPath = "/some/job/path" + val context = mapOf(DEFAULT_JOB_LOG_PATH_MDC_KEY to jobPath) + val loggingEvent = + mockk { + every { mdcPropertyMap } returns context + } + + val discriminatorValue = discriminator.getDiscriminatingValue(event = loggingEvent) + assertEquals(jobPath, discriminatorValue) + } + + @Test + fun testLoggingEventWithEmptyContext() { + val context = emptyMap() + val loggingEvent = + mockk { + every { mdcPropertyMap } returns 
context + } + + val discriminatorValue = discriminator.getDiscriminatingValue(event = loggingEvent) + assertEquals("", discriminatorValue) + } +} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt new file mode 100644 index 00000000000..89a9c1a6acb --- /dev/null +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt @@ -0,0 +1,186 @@ +/* + * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. + */ + +package io.airbyte.commons.logging.logback + +import ch.qos.logback.classic.spi.ILoggingEvent +import io.airbyte.commons.constants.AirbyteSecretConstants +import io.airbyte.commons.json.Jsons +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.Assertions +import org.junit.jupiter.api.Assertions.assertEquals +import org.junit.jupiter.api.Assertions.assertFalse +import org.junit.jupiter.api.Assertions.assertNotEquals +import org.junit.jupiter.api.Assertions.assertTrue +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test + +private class MaskedDataConverterTest { + private lateinit var converter: MaskedDataConverter + + @BeforeEach + fun setup() { + converter = MaskedDataConverter(specMaskFile = TEST_SPEC_SECRET_MASK_YAML) + } + + @Test + fun testMaskingMessageWithStringSecret() { + val loggingEvent = + mockk { + every { formattedMessage } returns JSON_WITH_STRING_SECRETS + } + + val result = converter.convert(event = loggingEvent) + + val json = Jsons.deserialize(result) + assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()) + assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()) + } + + @Test + fun testMaskingMessageWithStringSecretWithQuotes() { + val loggingEvent = + mockk { + every { formattedMessage } returns 
JSON_WITH_STRING_WITH_QUOTE_SECRETS + } + + val result = converter.convert(event = loggingEvent) + + val json = Jsons.deserialize(result) + assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()) + assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()) + } + + @Test + fun testMaskingMessageWithNumberSecret() { + val loggingEvent = + mockk { + every { formattedMessage } returns JSON_WITH_NUMBER_SECRETS + } + + val result = converter.convert(event = loggingEvent) + + val json = Jsons.deserialize(result) + assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()) + assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()) + } + + @Test + fun testMaskingMessageWithWithoutSecrets() { + val loggingEvent = + mockk { + every { formattedMessage } returns JSON_WITHOUT_SECRETS + } + + val result = converter.convert(event = loggingEvent) + + val json = Jsons.deserialize(result) + assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json["prop1"].asText()) + assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("prop2").asText()) + assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("prop3").asText()) + } + + @Test + fun testMaskingMessageThatDoesNotMatchPattern() { + val message = "This is some log message that doesn't match the pattern." 
+ val loggingEvent = + mockk { + every { formattedMessage } returns message + } + + val result = converter.convert(event = loggingEvent) + + assertFalse(result.contains(AirbyteSecretConstants.SECRETS_MASK)) + assertEquals(message, result) + } + + @Test + fun testMissingMaskingFileDoesNotPreventLogging() { + val logEvent = + mockk { + every { formattedMessage } returns JSON_WITHOUT_SECRETS + } + + Assertions.assertDoesNotThrow { + val converter = MaskedDataConverter(specMaskFile = "/does_not_exist.yaml") + val result = converter.convert(event = logEvent) + assertEquals(JSON_WITHOUT_SECRETS, result) + } + } + + @Test + fun testMaskingMessageWithSqlValues() { + val loggingEvent = + mockk { + every { formattedMessage } returns TEST_LOGGED_SQL_VALUES + } + + val result = converter.convert(event = loggingEvent) + + assertEquals(REDACTED_LOGGED_SQL_VALUES, result) + } + + @Test + fun testMaskingMessageWithRecordContents() { + val loggingEvent = + mockk { + every { formattedMessage } returns TEST_LOGGED_RECORD_CONTENTS + } + + val result = converter.convert(event = loggingEvent) + + assertEquals(REDACTED_LOGGED_RECORD_CONTENTS, result) + } + + @Test + fun testMaskingPlainTextLogLine() { + val message = "500 Server Error: Internal Server Error for url: https://localhost/api/v1/search?limit=100&archived=false&hapikey=secret-key_1" + val loggingEvent = + mockk { + every { formattedMessage } returns message + } + val result = converter.convert(event = loggingEvent) + assertFalse(result.contains("apikey=secret-key_1")) + assertTrue(result.contains("apikey=${AirbyteSecretConstants.SECRETS_MASK}")) + } + + companion object { + private const val FOO: String = "foo" + private const val OTHER: String = "other" + private const val JSON_WITH_STRING_SECRETS = "{\"$FOO\":\"test\",\"$OTHER\":{\"prop\":\"value\",\"bar\":\"1234\"}}" + private const val JSON_WITH_NUMBER_SECRETS = "{\"$FOO\":\"test\",\"$OTHER\":{\"prop\":\"value\",\"bar\":1234}}" + private const val 
JSON_WITH_STRING_WITH_QUOTE_SECRETS = "{\"$FOO\":\"\\\"test\\\"\",\"$OTHER\":{\"prop\":\"value\",\"bar\":\"1234\"}}" + private const val JSON_WITHOUT_SECRETS = "{\"prop1\":\"test\",\"$OTHER\":{\"prop2\":\"value\",\"prop3\":1234}}" + private const val REDACTED_LOGGED_SQL_VALUES = + ( + "2024-03-19 20:03:43 \u001B[43mdestination\u001B[0m > ERROR pool-4-thread-1 i.a.c.i.d.a.FlushWorkers(flush\$lambda$6):192 " + + "Flush Worker (632c9) -- flush worker " + + "error: java.lang.RuntimeException: org.jooq.exception.DataAccessException: SQL [insert into " + + "\"airbyte_internal\".\"public_raw__stream_foo\" (_airbyte_raw_id, _airbyte_data, _airbyte_meta, _airbyte_extracted_at, " + + "_airbyte_loaded_at) values (${AirbyteSecretConstants.SECRETS_MASK}" + ) + private const val REDACTED_LOGGED_RECORD_CONTENTS: String = + ( + "2024-03-21 12:19:08 \u001B[43mdestination\u001B[0m > ERROR i.a.c.i.b.Destination" + + "\$ShimToSerializedAirbyteMessageConsumer(consumeMessage):120" + + " Received invalid message:${AirbyteSecretConstants.SECRETS_MASK}" + ) + private const val TEST_LOGGED_SQL_VALUES: String = + ( + "2024-03-19 20:03:43 \u001B[43mdestination\u001B[0m > ERROR pool-4-thread-1 " + + "i.a.c.i.d.a.FlushWorkers(flush\$lambda\$6):192 Flush Worker (632c9) -- flush worker " + + "error: java.lang.RuntimeException: org.jooq.exception.DataAccessException: SQL [insert into " + + "\"airbyte_internal\".\"public_raw__stream_foo\" (_airbyte_raw_id, _airbyte_data, _airbyte_meta, _airbyte_extracted_at, " + + "_airbyte_loaded_at) values ('UUID', a bunch of other stuff" + ) + private const val TEST_LOGGED_RECORD_CONTENTS: String = + ( + "2024-03-21 12:19:08 \u001B[43mdestination\u001B[0m > ERROR " + + "i.a.c.i.b.Destination\$ShimToSerializedAirbyteMessageConsumer(consumeMessage):120 " + + "Received invalid message: {\"type\":\"RECORD\",\"record\":{\"namespace\":\"" + ) + private const val TEST_SPEC_SECRET_MASK_YAML = "/test_spec_secret_mask.yaml" + } +} diff --git 
a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt index 971474d4455..09f41441d11 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt @@ -4,6 +4,8 @@ package io.airbyte.commons.storage +import com.google.cloud.storage.Bucket +import com.google.cloud.storage.BucketInfo import com.google.cloud.storage.Storage import io.micronaut.context.annotation.Bean import io.micronaut.context.annotation.Primary @@ -17,6 +19,10 @@ import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.Test import software.amazon.awssdk.services.s3.S3Client +import software.amazon.awssdk.services.s3.model.CreateBucketRequest +import software.amazon.awssdk.services.s3.model.CreateBucketResponse +import software.amazon.awssdk.services.s3.model.HeadBucketRequest +import software.amazon.awssdk.services.s3.model.NoSuchBucketException /** * Note @MockBean doesn't work in this class for some reason, possible due to a Micronaut 3 problem. 
@@ -66,7 +72,11 @@ class GcsStorageClientFactoryTest { every { applicationCredentials } returns "mock-app-creds" } - val gcsClient: Storage = mockk() + val gcsClient: Storage = + mockk { + every { get(any(), *anyVararg()) } returns null + every { create(any()) } returns mockk() + } init { mockkStatic(GcsStorageConfig::gcsClient) @@ -97,7 +107,11 @@ class MinioStorageClientFactoryTest { every { endpoint } returns "mock-endpoint" } - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } init { mockkStatic(MinioStorageConfig::s3Client) @@ -128,7 +142,11 @@ class S3StorageClientFactoryTest { every { region } returns "mock-region" } - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } init { mockkStatic(S3StorageConfig::s3Client) diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt index 9ff5ea3ff17..711a61259a7 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt @@ -6,10 +6,12 @@ package io.airbyte.commons.storage import com.azure.core.util.BinaryData import com.azure.storage.blob.BlobClient +import com.azure.storage.blob.BlobContainerClient import com.azure.storage.blob.BlobServiceClient import com.google.cloud.storage.Blob import com.google.cloud.storage.BlobId import com.google.cloud.storage.BlobInfo +import com.google.cloud.storage.BucketInfo import com.google.cloud.storage.Storage import io.mockk.every import io.mockk.mockk @@ -24,15 +26,20 @@ import org.junit.jupiter.api.io.TempDir import 
software.amazon.awssdk.core.ResponseBytes import software.amazon.awssdk.core.sync.RequestBody import software.amazon.awssdk.services.s3.S3Client +import software.amazon.awssdk.services.s3.model.CreateBucketRequest +import software.amazon.awssdk.services.s3.model.CreateBucketResponse import software.amazon.awssdk.services.s3.model.DeleteObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectResponse +import software.amazon.awssdk.services.s3.model.HeadBucketRequest import software.amazon.awssdk.services.s3.model.HeadObjectRequest +import software.amazon.awssdk.services.s3.model.NoSuchBucketException import software.amazon.awssdk.services.s3.model.NoSuchKeyException import software.amazon.awssdk.services.s3.model.PutObjectRequest import java.io.InputStream import java.nio.charset.StandardCharsets import java.nio.file.Path +import com.google.cloud.storage.Bucket as GcsBucket private const val KEY = "a" private const val DOC1 = "hello" @@ -55,6 +62,11 @@ class AzureStorageClientTest { @Test fun `key matches`() { val azureClient: BlobServiceClient = mockk() + val blobContainerClient: BlobContainerClient = mockk() + + every { azureClient.getBlobContainerClient(any()) } returns blobContainerClient + every { blobContainerClient.exists() } returns false + every { blobContainerClient.createIfNotExists() } returns true val clientState = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) assertEquals("/state/$KEY", clientState.key(KEY)) @@ -66,6 +78,12 @@ class AzureStorageClientTest { @Test fun `read missing doc`() { val azureClient: BlobServiceClient = mockk() + val blobContainerClient: BlobContainerClient = mockk() + + every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient + every { blobContainerClient.exists() } returns false + every { blobContainerClient.createIfNotExists() } returns true + val 
client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns @@ -82,6 +100,12 @@ class AzureStorageClientTest { @Test fun `read existing doc`() { val azureClient: BlobServiceClient = mockk() + val blobContainerClient: BlobContainerClient = mockk() + + every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient + every { blobContainerClient.exists() } returns false + every { blobContainerClient.createIfNotExists() } returns true + val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns @@ -105,15 +129,19 @@ class AzureStorageClientTest { @Test fun `write doc`() { val azureClient: BlobServiceClient = mockk() - var blobClient: BlobClient = mockk() - val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) + val blobClient: BlobClient = mockk() + val blobContainerClient: BlobContainerClient = mockk() - every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns - mockk { every { getBlobClient(client.key(KEY)) } returns blobClient } + every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient + every { blobContainerClient.exists() } returns false + every { blobContainerClient.createIfNotExists() } returns true every { blobClient.exists() } returns true every { blobClient.upload(any()) } returns Unit + val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) + every { blobContainerClient.getBlobClient(client.key(KEY)) } returns blobClient + client.write(KEY, DOC1) verify { blobClient.upload(any()) } } @@ -121,6 +149,12 @@ class AzureStorageClientTest { @Test fun `delete doc`() { val 
azureClient: BlobServiceClient = mockk() + val blobContainerClient: BlobContainerClient = mockk() + + every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient + every { blobContainerClient.exists() } returns false + every { blobContainerClient.createIfNotExists() } returns true + val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) // doc not deleted @@ -150,7 +184,11 @@ class GcsStorageClientTest { @Test fun `blobId matches`() { - val gcsClient: Storage = mockk() + val gcsClient: Storage = + mockk { + every { get(any(), *anyVararg()) } returns null + every { create(any()) } returns mockk() + } val clientState = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) assertEquals(BlobId.of(buckets.state, "/state/$KEY"), clientState.blobId(KEY)) @@ -161,7 +199,11 @@ class GcsStorageClientTest { @Test fun `read missing doc`() { - val gcsClient: Storage = mockk() + val gcsClient: Storage = + mockk { + every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null + every { create(any()) } returns mockk() + } val client = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) // verify no blob is returned @@ -178,7 +220,11 @@ class GcsStorageClientTest { @Test fun `read existing doc`() { - val gcsClient: Storage = mockk() + val gcsClient: Storage = + mockk { + every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null + every { create(any()) } returns mockk() + } val client = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) val blobId = client.blobId(KEY) @@ -196,7 +242,11 @@ class GcsStorageClientTest { @Test fun `write doc`() { - val gcsClient: Storage = mockk() + val gcsClient: Storage = + mockk { + every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null + every { create(any()) } returns mockk() + } val client = 
GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) val blobId = client.blobId(KEY) @@ -210,7 +260,11 @@ class GcsStorageClientTest { @Test fun `delete doc`() { - val gcsClient: Storage = mockk() + val gcsClient: Storage = + mockk { + every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null + every { create(any()) } returns mockk() + } val client = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) val blobId = client.blobId(KEY) @@ -263,7 +317,11 @@ class MinioStorageClientTest { @Test fun `key matches`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val clientState = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) assertEquals("/state/$KEY", clientState.key(KEY)) @@ -274,7 +332,11 @@ class MinioStorageClientTest { @Test fun `read missing doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -293,7 +355,11 @@ class MinioStorageClientTest { @Test fun `read existing doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -316,7 +382,11 @@ class MinioStorageClientTest { @Test fun `write doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws 
NoSuchBucketException.builder().build() + } val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -334,7 +404,11 @@ class MinioStorageClientTest { @Test fun `delete doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val existsRequest = @@ -368,7 +442,11 @@ class S3StorageClientTest { @Test fun `key matches`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val clientState = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) assertEquals("/state/$KEY", clientState.key(KEY)) @@ -379,7 +457,11 @@ class S3StorageClientTest { @Test fun `read missing doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -398,7 +480,11 @@ class S3StorageClientTest { @Test fun `read existing doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -421,7 +507,11 @@ class S3StorageClientTest { @Test fun `write doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws 
NoSuchBucketException.builder().build() + } val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -439,7 +529,11 @@ class S3StorageClientTest { @Test fun `delete doc`() { - val s3Client: S3Client = mockk() + val s3Client: S3Client = + mockk { + every { createBucket(any()) } returns mockk() + every { headBucket(any()) } throws NoSuchBucketException.builder().build() + } val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val existsRequest = diff --git a/airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml b/airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml new file mode 100644 index 00000000000..5a10d04d808 --- /dev/null +++ b/airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml @@ -0,0 +1,5 @@ +--- +properties: + - foo + - bar + - baz diff --git a/airbyte-commons-worker/build.gradle.kts b/airbyte-commons-worker/build.gradle.kts index 30bb3d5e552..25e582f25b1 100644 --- a/airbyte-commons-worker/build.gradle.kts +++ b/airbyte-commons-worker/build.gradle.kts @@ -3,6 +3,9 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -33,7 +36,6 @@ dependencies { implementation(libs.bundles.datadog) implementation(libs.commons.io) implementation(libs.bundles.apache) - implementation(libs.bundles.log4j) implementation(libs.failsafe.okhttp) implementation(libs.google.cloud.storage) implementation(libs.okhttp) @@ -94,6 +96,7 @@ dependencies { testImplementation(libs.assertj.core) testImplementation(libs.junit.pioneer) testImplementation(libs.mockk) + testImplementation(libs.bundles.logback) testRuntimeOnly(libs.junit.jupiter.engine) testRuntimeOnly(libs.javax.databind) diff --git 
a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java index bcd5038d6c2..f48b9b9de61 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java @@ -4,6 +4,7 @@ package io.airbyte.workers.general; +import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_JOB_LOG_PATH_MDC_KEY; import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_LOG_FILENAME; import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_WORKSPACE_MDC_KEY; import static io.airbyte.metrics.lib.OssMetricsRegistry.WORKER_DESTINATION_ACCEPT_TIMEOUT; @@ -46,6 +47,7 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.logging.LocalLogMdcHelper; import io.airbyte.commons.logging.LogMdcHelper; +import io.airbyte.commons.logging.LoggingHelper; import io.airbyte.commons.string.Strings; import io.airbyte.config.ConfigSchema; import io.airbyte.config.FailureReason; @@ -310,6 +312,9 @@ void setup() throws Exception { destinationCatalogGenerator = mock(DestinationCatalogGenerator.class); when(destinationCatalogGenerator.generateDestinationCatalog(any())) .thenReturn(new DestinationCatalogGenerator.CatalogGenerationResult(destinationConfig.getCatalog(), Map.of())); + + MDC.put(DEFAULT_JOB_LOG_PATH_MDC_KEY, jobRoot.toString()); + MDC.put(LoggingHelper.LOG_SOURCE_MDC_KEY, LoggingHelper.platformLogSource()); } @AfterEach @@ -698,6 +703,7 @@ void testLoggingInThreads() throws IOException, WorkerException { @Test void testLogMaskRegex() throws IOException { + MDC.clear(); final Path jobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); MDC.put(DEFAULT_WORKSPACE_MDC_KEY, jobRoot.toString()); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java 
b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java index 3f8904f7239..bc775f25a33 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java @@ -10,7 +10,6 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; -import static org.mockito.Mockito.verifyNoMoreInteractions; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.logging.MdcScope.Builder; @@ -42,7 +41,6 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.assertj.core.api.Assertions; -import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -74,11 +72,6 @@ void setup() { logger = spy(LoggerFactory.getLogger(VersionedAirbyteStreamFactoryTest.class)); } - @AfterEach() - void afterEach() { - verifyNoMoreInteractions(logger); - } - @Test void testValid() { final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green"); diff --git a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt index 6aac766195a..40adeb8bcf4 100644 --- a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt +++ b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt @@ -68,6 +68,7 @@ enum class EnvVar { LOCAL_DOCKER_MOUNT, LOCAL_ROOT, LOG4J_CONFIGURATION_FILE, + LOG_IDLE_ROUTE_TTL, LOG_LEVEL, METRIC_CLIENT, @@ -112,10 +113,19 @@ enum class EnvVar { ; /** - * Fetch the value of this [EnvVar], returning [default] if the value is null or an empty string + * Fetch the value of this [EnvVar], returning [default] if the value is null or an empty string. 
* * @param default value to return if this environment variable is null or empty */ @JvmOverloads fun fetch(default: String? = null): String? = System.getenv(this.name).takeUnless { it.isNullOrBlank() } ?: default + + /** + * Fetch the value of this [EnvVar], returning a non-null [default] if the value is null or an empty string. + * + * @param default value to return if this environment variable is null or empty + * + * If kotlin contracts ever become stable, this method could be replaced with a contract on the [fetch] method. + */ + fun fetchNotNull(default: String = ""): String = System.getenv(this.name).takeUnless { it.isNullOrBlank() } ?: default } diff --git a/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt b/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt index 1148d2ec5a5..de4afb7764e 100644 --- a/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt +++ b/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt @@ -27,4 +27,21 @@ class EnvVarTest { fun `fetch returns null when unset and no default defined`() { assertNull(EnvVar.Z_TESTING_PURPOSES_ONLY_3.fetch()) } + + @Test + fun `fetchNotNull returns the correct value when set`() { + assertEquals("value-defined", EnvVar.Z_TESTING_PURPOSES_ONLY_1.fetchNotNull()) + assertEquals("value-defined", EnvVar.Z_TESTING_PURPOSES_ONLY_1.fetchNotNull(default = "not this value")) + } + + @Test + fun `fetchNotNull returns the default value if missing or blank`() { + val default = "defined as blank, so should return this value instead" + assertEquals(default, EnvVar.Z_TESTING_PURPOSES_ONLY_2.fetchNotNull(default = default)) + } + + @Test + fun `fetchNotNull returns empty string when unset and no default defined`() { + assertEquals("", EnvVar.Z_TESTING_PURPOSES_ONLY_3.fetchNotNull()) + } } diff --git a/airbyte-config/config-persistence/build.gradle.kts b/airbyte-config/config-persistence/build.gradle.kts index b16d1f260e9..fc1aa8d7f65 100644 
--- a/airbyte-config/config-persistence/build.gradle.kts +++ b/airbyte-config/config-persistence/build.gradle.kts @@ -8,6 +8,10 @@ configurations.all { exclude(group = "io.micronaut.flyway") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut diff --git a/airbyte-connector-builder-server/build.gradle.kts b/airbyte-connector-builder-server/build.gradle.kts index 571fe6f032c..c93f9bc24a5 100644 --- a/airbyte-connector-builder-server/build.gradle.kts +++ b/airbyte-connector-builder-server/build.gradle.kts @@ -9,6 +9,10 @@ plugins { id("io.airbyte.gradle.kube-reload") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { // Micronaut dependencies annotationProcessor(platform(libs.micronaut.platform)) @@ -44,6 +48,7 @@ dependencies { implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-server")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-worker")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-config:config-persistence")) @@ -59,6 +64,7 @@ dependencies { implementation("io.pebbletemplates:pebble:3.2.2") runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) testRuntimeOnly(libs.junit.jupiter.engine) testImplementation(libs.bundles.junit) diff --git a/airbyte-connector-builder-server/src/main/resources/application.yml b/airbyte-connector-builder-server/src/main/resources/application.yml index 52f7b79fe43..8984c63bcc9 100644 --- a/airbyte-connector-builder-server/src/main/resources/application.yml +++ b/airbyte-connector-builder-server/src/main/resources/application.yml @@ -128,6 +128,11 @@ jackson: logger: levels: - io.airbyte.bootloader: DEBUG - # Uncomment to help resolve issues with 
conditional beans - # io.micronaut.context.condition: DEBUG + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-connector-rollout-client/build.gradle.kts b/airbyte-connector-rollout-client/build.gradle.kts index 3939070986f..fabb9309d57 100644 --- a/airbyte-connector-rollout-client/build.gradle.kts +++ b/airbyte-connector-rollout-client/build.gradle.kts @@ -18,6 +18,10 @@ repositories { mavenCentral() } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { // TODO: remove the deps not being used compileOnly(libs.lombok) @@ -29,12 +33,15 @@ dependencies { annotationProcessor("info.picocli:picocli-codegen:4.7.4") implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-connector-rollout-shared")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-api:server-api")) implementation(libs.airbyte.protocol) implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.12.5") + runtimeOnly(libs.bundles.logback) + } application { diff --git a/airbyte-connector-rollout-client/src/main/resources/application.yml b/airbyte-connector-rollout-client/src/main/resources/application.yml index c4ec9017422..645492978f7 100644 --- a/airbyte-connector-rollout-client/src/main/resources/application.yml +++ b/airbyte-connector-rollout-client/src/main/resources/application.yml @@ -56,7 +56,13 @@ jackson: mapper: ACCEPT_CASE_INSENSITIVE_ENUMS: true -logging: - level: - root: INFO - io.airbyte: DEBUG +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve 
issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-connector-rollout-worker/build.gradle.kts b/airbyte-connector-rollout-worker/build.gradle.kts index a801c4a1744..d3ff720371a 100644 --- a/airbyte-connector-rollout-worker/build.gradle.kts +++ b/airbyte-connector-rollout-worker/build.gradle.kts @@ -4,6 +4,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) @@ -18,8 +22,13 @@ dependencies { implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-connector-rollout-shared")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) + + + runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) } airbyte { diff --git a/airbyte-connector-rollout-worker/src/main/resources/application.properties b/airbyte-connector-rollout-worker/src/main/resources/application.properties deleted file mode 100644 index fe87f74840d..00000000000 --- a/airbyte-connector-rollout-worker/src/main/resources/application.properties +++ /dev/null @@ -1 +0,0 @@ -micronaut.application.name=airbyte-connector-rollout-worker diff --git a/airbyte-connector-rollout-worker/src/main/resources/application.yml b/airbyte-connector-rollout-worker/src/main/resources/application.yml index 8d4504d4aa1..2684555bca6 100644 --- a/airbyte-connector-rollout-worker/src/main/resources/application.yml +++ b/airbyte-connector-rollout-worker/src/main/resources/application.yml @@ -78,7 +78,13 @@ jackson: mapper: ACCEPT_CASE_INSENSITIVE_ENUMS: true -logging: - level: - root: INFO - io.airbyte: DEBUG +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + 
io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-connector-sidecar/build.gradle.kts b/airbyte-connector-sidecar/build.gradle.kts index aaac4102113..306664f7727 100644 --- a/airbyte-connector-sidecar/build.gradle.kts +++ b/airbyte-connector-sidecar/build.gradle.kts @@ -29,6 +29,7 @@ configurations.all { exclude(group = "io.micronaut.openapi") exclude(group = "io.micronaut.flyway") exclude(group = "io.micronaut.sql") + exclude(group="org.apache.logging.log4j") } dependencies { @@ -36,7 +37,6 @@ dependencies { ksp(libs.bundles.micronaut.annotation.processor) implementation(platform(libs.micronaut.platform)) - implementation(libs.bundles.log4j) implementation(libs.bundles.micronaut.light) implementation(libs.google.cloud.storage) implementation(libs.java.jwt) @@ -48,8 +48,8 @@ dependencies { implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-converters")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-protocol")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-worker")) implementation(project(":oss:airbyte-config:config-models")) @@ -59,7 +59,7 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.kotlin.reflect) - runtimeOnly(libs.appender.log4j2) + runtimeOnly(libs.bundles.logback) runtimeOnly(libs.bundles.bouncycastle) // cryptography package kspTest(platform(libs.micronaut.platform)) diff --git a/airbyte-connector-sidecar/src/main/resources/application.yml b/airbyte-connector-sidecar/src/main/resources/application.yml index 57316af5b77..2b59b19e9a3 100644 --- a/airbyte-connector-sidecar/src/main/resources/application.yml +++ 
b/airbyte-connector-sidecar/src/main/resources/application.yml @@ -67,3 +67,14 @@ airbyte: retries: delay-seconds: ${WORKLOAD_API_RETRY_DELAY_SECONDS:2} max: ${WORKLOAD_API_MAX_RETRIES:5} + +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-container-orchestrator/build.gradle.kts b/airbyte-container-orchestrator/build.gradle.kts index 1fd25b2213d..fdd743ee78b 100644 --- a/airbyte-container-orchestrator/build.gradle.kts +++ b/airbyte-container-orchestrator/build.gradle.kts @@ -23,6 +23,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + val airbyteProtocol by configurations.creating dependencies { @@ -41,17 +45,16 @@ dependencies { implementation(libs.sts) implementation(libs.kubernetes.client) implementation(libs.bundles.datadog) - implementation(libs.bundles.log4j) implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-commons-converters")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-with-dependencies")) implementation(project(":oss:airbyte-commons-worker")) @@ -63,6 +66,7 @@ dependencies { implementation(project(":oss:airbyte-worker-models")) runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) 
testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-container-orchestrator/src/main/resources/application.yml b/airbyte-container-orchestrator/src/main/resources/application.yml index 748b73fe5c9..30ac58faf07 100644 --- a/airbyte-container-orchestrator/src/main/resources/application.yml +++ b/airbyte-container-orchestrator/src/main/resources/application.yml @@ -145,3 +145,14 @@ endpoints: threaddump: enabled: true sensitive: true + +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-cron/build.gradle.kts b/airbyte-cron/build.gradle.kts index 2dd262791fe..eac5c77830a 100644 --- a/airbyte-cron/build.gradle.kts +++ b/airbyte-cron/build.gradle.kts @@ -4,6 +4,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -35,6 +39,7 @@ dependencies { implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-micronaut")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-config:config-persistence")) @@ -47,6 +52,7 @@ dependencies { implementation(project(":oss:airbyte-persistence:job-persistence")) runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-cron/src/main/resources/application.yml 
b/airbyte-cron/src/main/resources/application.yml index b89647955b1..e9dc74cdb2b 100644 --- a/airbyte-cron/src/main/resources/application.yml +++ b/airbyte-cron/src/main/resources/application.yml @@ -178,5 +178,11 @@ temporal: logger: levels: -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-data/build.gradle.kts b/airbyte-data/build.gradle.kts index a43b269b267..feea3e101cc 100644 --- a/airbyte-data/build.gradle.kts +++ b/airbyte-data/build.gradle.kts @@ -4,6 +4,10 @@ plugins { `java-test-fixtures` } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -25,6 +29,7 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-license")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-config:config-secrets")) implementation(project(":oss:airbyte-db:db-lib")) diff --git a/airbyte-featureflag-server/build.gradle.kts b/airbyte-featureflag-server/build.gradle.kts index 79d8994d5f7..676d0456252 100644 --- a/airbyte-featureflag-server/build.gradle.kts +++ b/airbyte-featureflag-server/build.gradle.kts @@ -3,6 +3,10 @@ plugins { id("io.airbyte.gradle.docker") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { ksp(libs.bundles.micronaut.annotation.processor) ksp(libs.v3.swagger.annotations) @@ -18,7 +22,6 @@ dependencies { implementation(platform(libs.micronaut.platform)) 
implementation(libs.bundles.micronaut) implementation(libs.bundles.micronaut.kotlin) - implementation(libs.log4j.impl) implementation(libs.jakarta.ws.rs.api) implementation(libs.micronaut.http) implementation(libs.micronaut.security) @@ -29,6 +32,10 @@ dependencies { implementation(libs.kotlin.logging) implementation(project(":oss:airbyte-commons")) + implementation(project(":oss:airbyte-commons-storage")) + + runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) testImplementation(libs.bundles.micronaut.test) testImplementation(libs.mockk) diff --git a/airbyte-featureflag-server/src/main/resources/application.yml b/airbyte-featureflag-server/src/main/resources/application.yml index d275e9ab59a..5c8fd2b8ed8 100644 --- a/airbyte-featureflag-server/src/main/resources/application.yml +++ b/airbyte-featureflag-server/src/main/resources/application.yml @@ -55,3 +55,14 @@ jackson: mapper: ACCEPT_CASE_INSENSITIVE_ENUMS: true serialization-inclusion: always + +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO + # Uncomment to help resolve issues with conditional beans + # io.micronaut.context.condition: DEBUG diff --git a/airbyte-keycloak-setup/build.gradle.kts b/airbyte-keycloak-setup/build.gradle.kts index 4c511dcc4f0..e6588cef173 100644 --- a/airbyte-keycloak-setup/build.gradle.kts +++ b/airbyte-keycloak-setup/build.gradle.kts @@ -4,6 +4,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -18,10 +22,13 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) + implementation(project(":oss:airbyte-commons-storage")) 
implementation(project(":oss:airbyte-data")) implementation(project(":oss:airbyte-db:db-lib")) implementation(project(":oss:airbyte-db:jooq")) + runtimeOnly(libs.bundles.logback) + testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-keycloak-setup/src/main/resources/application.yml b/airbyte-keycloak-setup/src/main/resources/application.yml index 10ab9fcbdf6..caa7017fb25 100644 --- a/airbyte-keycloak-setup/src/main/resources/application.yml +++ b/airbyte-keycloak-setup/src/main/resources/application.yml @@ -44,3 +44,14 @@ datasources: driverClassName: org.postgresql.Driver username: ${DATABASE_USER} password: ${DATABASE_PASSWORD} + +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-metrics/reporter/build.gradle.kts b/airbyte-metrics/reporter/build.gradle.kts index c46fa09b74b..21518e3bee4 100644 --- a/airbyte-metrics/reporter/build.gradle.kts +++ b/airbyte-metrics/reporter/build.gradle.kts @@ -8,6 +8,10 @@ configurations { create("jdbc") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -15,6 +19,7 @@ dependencies { implementation(platform(libs.micronaut.platform)) implementation(libs.bundles.micronaut) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-db:jooq")) implementation(project(":oss:airbyte-db:db-lib")) @@ -22,6 +27,7 @@ dependencies { implementation(libs.jooq) runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) 
testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-metrics/reporter/src/main/resources/application.yml b/airbyte-metrics/reporter/src/main/resources/application.yml index 27ed71a4f8c..8f24f259bc2 100644 --- a/airbyte-metrics/reporter/src/main/resources/application.yml +++ b/airbyte-metrics/reporter/src/main/resources/application.yml @@ -35,6 +35,11 @@ endpoints: logger: levels: - io.airbyte.bootloader: DEBUG -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-notification/build.gradle.kts b/airbyte-notification/build.gradle.kts index f04449c1162..3328531a07e 100644 --- a/airbyte-notification/build.gradle.kts +++ b/airbyte-notification/build.gradle.kts @@ -3,6 +3,10 @@ plugins { id("io.airbyte.gradle.publish") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -23,8 +27,6 @@ dependencies { implementation(libs.commons.io) implementation(platform(libs.fasterxml)) implementation(libs.bundles.jackson) - // TODO remove this, it"s used for String.isEmpty check) - implementation(libs.bundles.log4j) testImplementation(libs.mockk) testRuntimeOnly(libs.junit.jupiter.engine) diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java index 6c3755424b7..91456a1d525 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java +++ 
b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java @@ -21,6 +21,7 @@ import io.airbyte.notification.slack.Field; import io.airbyte.notification.slack.Notification; import io.airbyte.notification.slack.Section; +import io.micronaut.core.util.StringUtils; import java.io.IOException; import java.net.URI; import java.net.http.HttpClient; @@ -29,7 +30,6 @@ import java.util.Comparator; import java.util.List; import java.util.Optional; -import org.apache.logging.log4j.util.Strings; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -222,7 +222,7 @@ public boolean notifySchemaPropagated(final SchemaUpdateNotification notificatio notification.getWorkspace().getUrl(), notification.getSourceInfo().getUrl()); final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { + if (!StringUtils.isEmpty(webhookUrl)) { return notifyJson(slackNotification.toJsonNode()); } return false; @@ -334,7 +334,7 @@ private boolean notify(final String message) throws IOException, InterruptedExce } private boolean notifyJson(final JsonNode node) throws IOException, InterruptedException { - if (Strings.isEmpty(config.getWebhook())) { + if (StringUtils.isEmpty(config.getWebhook())) { return false; } final ObjectMapper mapper = new ObjectMapper(); @@ -367,7 +367,7 @@ public String getNotificationClientType() { */ public boolean notifyTest(final String message) throws IOException, InterruptedException { final String webhookUrl = config.getWebhook(); - if (!Strings.isEmpty(webhookUrl)) { + if (!StringUtils.isEmpty(webhookUrl)) { return notify(message); } return false; diff --git a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java index 7e599673a59..0de0532344e 100644 --- 
a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java @@ -24,6 +24,7 @@ import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.SyncStats; import io.airbyte.config.helpers.ScheduleHelpers; +import io.micronaut.core.util.StringUtils; import java.util.Collection; import java.util.Comparator; import java.util.LinkedHashMap; @@ -31,7 +32,6 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; -import org.apache.logging.log4j.util.Strings; /** * Helpers to fetch stats / metadata about Airbyte domain models and turn them into flat maps that @@ -126,7 +126,7 @@ private static Map generateActorDefinitionVersionMetadata(final final Builder metadata = ImmutableMap.builder(); metadata.put(metaPrefix + "docker_repository", sourceVersion.getDockerRepository()); final String imageTag = sourceVersion.getDockerImageTag(); - if (!Strings.isEmpty(imageTag)) { + if (!StringUtils.isEmpty(imageTag)) { metadata.put(metaPrefix + "version", imageTag); } return metadata.build(); @@ -149,7 +149,7 @@ public static Map generateJobAttemptMetadata(final Job job) { if (attempts == null || attempts.isEmpty()) { return metadata.build(); } - final Attempt lastAttempt = attempts.get(attempts.size() - 1); + final Attempt lastAttempt = attempts.getLast(); if (lastAttempt.getOutput() == null || lastAttempt.getOutput().isEmpty()) { return metadata.build(); } @@ -216,7 +216,7 @@ public static Map generateJobAttemptMetadata(final Job job) { final List failureReasons = failureReasonsList(attempts); if (!failureReasons.isEmpty()) { metadata.put("failure_reasons", failureReasonsListAsJson(failureReasons).toString()); - metadata.put("main_failure_reason", failureReasonAsJson(failureReasons.get(0)).toString()); + metadata.put("main_failure_reason", 
failureReasonAsJson(failureReasons.getFirst()).toString()); } return metadata.build(); } diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 0a2f75dfab6..54061c67e1e 100644 --- a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -5,6 +5,10 @@ plugins { id("io.airbyte.gradle.kube-reload") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -39,7 +43,6 @@ dependencies { implementation(libs.swagger.annotations) implementation(libs.google.cloud.storage) implementation(libs.cron.utils) - implementation(libs.log4j.slf4j2.impl) // Because cron-utils uses slf4j 2.0+ implementation(libs.jakarta.ws.rs.api) implementation(libs.jakarta.validation.api) implementation(libs.kubernetes.client) @@ -52,9 +55,9 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-converters")) implementation(project(":oss:airbyte-commons-license")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-server")) @@ -75,7 +78,9 @@ dependencies { implementation(libs.airbyte.protocol) implementation(project(":oss:airbyte-persistence:job-persistence")) + runtimeOnly(libs.snakeyaml) runtimeOnly(libs.javax.databind) + runtimeOnly(libs.bundles.logback) // Required for local database secret hydration) runtimeOnly(libs.hikaricp) diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index ded8be3a392..acf16a87251 100644 --- 
a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -450,6 +450,12 @@ jooq: logger: levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO # Uncomment to help resolve issues with conditional beans # io.micronaut.context.condition: DEBUG # Uncomment to help resolve issues with security beans diff --git a/airbyte-workers/build.gradle.kts b/airbyte-workers/build.gradle.kts index 49331274c13..63c65c27c99 100644 --- a/airbyte-workers/build.gradle.kts +++ b/airbyte-workers/build.gradle.kts @@ -30,6 +30,7 @@ val jdbc by configurations.creating configurations.all { // The quartz-scheduler brings in an outdated version(of hikari, we do not want to inherit this version.) exclude(group = "com.zaxxer", module = "HikariCP-java7") + exclude(group="org.apache.logging.log4j") } dependencies { @@ -73,10 +74,10 @@ dependencies { implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-converters")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) implementation(project(":oss:airbyte-commons-protocol")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-worker")) @@ -98,6 +99,7 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.javax.databind) + runtimeOnly(libs.bundles.logback) testCompileOnly(libs.lombok) testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut diff --git a/airbyte-workers/src/main/resources/application.yml b/airbyte-workers/src/main/resources/application.yml index 
4b3d8300e8e..3661cb71ac6 100644 --- a/airbyte-workers/src/main/resources/application.yml +++ b/airbyte-workers/src/main/resources/application.yml @@ -294,6 +294,11 @@ temporal: logger: levels: - io.airbyte.bootloader: DEBUG -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-workload-api-server/build.gradle.kts b/airbyte-workload-api-server/build.gradle.kts index 4ca59ef7597..14c183f1501 100644 --- a/airbyte-workload-api-server/build.gradle.kts +++ b/airbyte-workload-api-server/build.gradle.kts @@ -4,6 +4,10 @@ plugins { id("io.airbyte.gradle.docker") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { ksp(libs.v3.swagger.annotations) ksp(platform(libs.micronaut.platform)) @@ -25,7 +29,6 @@ dependencies { implementation(libs.jakarta.transaction.api) implementation(libs.bundles.temporal) implementation(libs.bundles.temporal.telemetry) - implementation(libs.log4j.impl) implementation(libs.micronaut.jaxrs.server) implementation(libs.jakarta.ws.rs.api) implementation(libs.micronaut.security) @@ -40,6 +43,7 @@ dependencies { implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-commons")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-featureflag")) @@ -50,6 +54,7 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.javax.databind) + runtimeOnly(libs.bundles.logback) kspTest(platform(libs.micronaut.platform)) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git 
a/airbyte-workload-api-server/src/main/resources/application.yml b/airbyte-workload-api-server/src/main/resources/application.yml index b1d05b074a7..78f4bd85d4e 100644 --- a/airbyte-workload-api-server/src/main/resources/application.yml +++ b/airbyte-workload-api-server/src/main/resources/application.yml @@ -132,14 +132,20 @@ temporal: logger: levels: - # io.micronaut.data.query: TRACE - # Uncomment to help resolve issues with conditional beans - # io.micronaut.context.condition: DEBUG + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG # Uncomment to help resolve issues with security beans -# io.micronaut.security: DEBUG +# io.micronaut.security: DEBUG # Uncomment to help resolve issues with micronaut data -# com.zaxxer.hikari.HikariConfig: DEBUG -# com.zaxxer.hikari: TRACE +# io.micronaut.data.query: TRACE +# com.zaxxer.hikari.HikariConfig: DEBUG +# com.zaxxer.hikari: TRACE jackson: mapper: diff --git a/airbyte-workload-init-container/build.gradle.kts b/airbyte-workload-init-container/build.gradle.kts index 013d67cfa42..28bf715d863 100644 --- a/airbyte-workload-init-container/build.gradle.kts +++ b/airbyte-workload-init-container/build.gradle.kts @@ -8,6 +8,7 @@ configurations.all { exclude(group = "io.micronaut", module = "micronaut-http-server-netty") exclude(group = "io.micronaut.openapi") exclude(group = "io.micronaut.flyway") + exclude(group="org.apache.logging.log4j") } dependencies { @@ -24,6 +25,7 @@ dependencies { implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-config:config-secrets")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-worker")) implementation(project(":oss:airbyte-featureflag")) 
implementation(project(":oss:airbyte-mappers")) @@ -31,6 +33,9 @@ dependencies { implementation(project(":oss:airbyte-worker-models")) implementation(project(":oss:airbyte-commons-protocol")) + runtimeOnly(libs.snakeyaml) + runtimeOnly(libs.bundles.logback) + kspTest(platform(libs.micronaut.platform)) kspTest(libs.bundles.micronaut.annotation.processor) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-workload-init-container/src/main/resources/application.yml b/airbyte-workload-init-container/src/main/resources/application.yml index e2cddf984eb..f19c45a83e3 100644 --- a/airbyte-workload-init-container/src/main/resources/application.yml +++ b/airbyte-workload-init-container/src/main/resources/application.yml @@ -67,3 +67,14 @@ airbyte: retries: delay-seconds: ${WORKLOAD_API_RETRY_DELAY_SECONDS:2} max: ${WORKLOAD_API_MAX_RETRIES:5} + +logger: + levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-workload-launcher/build.gradle.kts b/airbyte-workload-launcher/build.gradle.kts index df337de64b2..92742d421c1 100644 --- a/airbyte-workload-launcher/build.gradle.kts +++ b/airbyte-workload-launcher/build.gradle.kts @@ -4,6 +4,10 @@ plugins { id("io.airbyte.gradle.docker") } +configurations.all { + exclude(group="org.apache.logging.log4j") +} + dependencies { ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) @@ -11,7 +15,6 @@ dependencies { implementation(libs.bundles.datadog) implementation(libs.bundles.kubernetes.client) - implementation(libs.bundles.log4j) implementation(libs.bundles.micronaut) implementation(libs.bundles.temporal) implementation(libs.bundles.temporal.telemetry) @@ -35,6 +38,7 @@ dependencies { implementation(project(":oss:airbyte-commons")) 
implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-micronaut")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-with-dependencies")) @@ -49,8 +53,8 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.kotlin.reflect) - runtimeOnly(libs.appender.log4j2) runtimeOnly(libs.bundles.bouncycastle) + runtimeOnly(libs.bundles.logback) // Required for secret hydration in OSS runtimeOnly(libs.hikaricp) diff --git a/airbyte-workload-launcher/src/main/resources/application.yml b/airbyte-workload-launcher/src/main/resources/application.yml index e3aadac1dca..5c94d5f623d 100644 --- a/airbyte-workload-launcher/src/main/resources/application.yml +++ b/airbyte-workload-launcher/src/main/resources/application.yml @@ -298,6 +298,12 @@ temporal: logger: levels: + com.zaxxer.hikari: ERROR + com.zaxxer.hikari.pool: ERROR + io.grpc: INFO + io.fabric8.kubernetes.client: INFO + io.netty: INFO + io.temporal: INFO # Uncomment to help resolve issues with conditional beans # io.micronaut.context.condition: DEBUG diff --git a/deps.toml b/deps.toml index 141f7190a6f..cc8c65bf91c 100644 --- a/deps.toml +++ b/deps.toml @@ -24,6 +24,7 @@ kotest = "5.9.1" kotlin-logging = "5.1.0" kubernetes-client = "6.12.1" log4j = "2.23.1" +logback = "1.5.8" lombok = "1.18.34" micronaut = "4.6.2" micronaut-cache = "5.0.1" @@ -115,6 +116,7 @@ jakarta-persistence-api = { module = "jakarta.persistence:jakarta.persistence-ap jakarta-transaction-api = { module = "jakarta.transaction:jakarta.transaction-api", version = "2.0.1" } jakarta-validation-api = { module = "jakarta.validation:jakarta.validation-api", version = "3.0.2" } jakarta-ws-rs-api = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "jax-rs" } +janino = { module = "org.codehaus.janino:janino", version = "3.1.12" } 
java-dogstatsd-client = { module = "com.datadoghq:java-dogstatsd-client", version = "4.1.0" } java-jwt = { module = "com.auth0:java-jwt", version = "3.19.2" } javax-databind = { module = "javax.xml.bind:jaxb-api", version = "2.4.0-b180830.0359" } @@ -155,6 +157,8 @@ log4j-impl = { module = "org.apache.logging.log4j:log4j-slf4j-impl", version.ref log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j" } log4j-slf4j2-impl = { module = "org.apache.logging.log4j:log4j-slf4j2-impl", version.ref = "log4j" } log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } +logback-classic = { module = "ch.qos.logback:logback-classic", version.ref = "logback" } +logback-core = { module = "ch.qos.logback:logback-core", version.ref = "logback" } lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } micrometer-statsd = { module = "io.micrometer:micrometer-registry-statsd", version = "1.9.3" } moshi-kotlin = { module = "com.squareup.moshi:moshi-kotlin", version.ref = "moshi" } @@ -259,6 +263,7 @@ keycloak-client = ["keycloak-admin-client", "keycloak-client-registration-api"] kotest = ["kotest-assertions"] kubernetes-client = ["kubernetes-client-api", "kubernetes-client"] log4j = ["log4j-api", "log4j-core", "log4j-impl", "log4j-slf4j2-impl", "log4j-web", "appender-log4j2"] +logback = ["logback-classic", "logback-core", "janino"] micronaut = ["jakarta-annotation-api", "jakarta-transaction-api", "micronaut-http-server-netty", "micronaut-http-client", "micronaut-inject", "micronaut-validation", "micronaut-runtime", "micronaut-management", "micronaut-flyway", "micronaut-jdbc-hikari", "micronaut-jooq", "micronaut-jackson-databind"] micronaut-light = ["jakarta-annotation-api", "micronaut-inject-java", "micronaut-runtime"] micronaut-annotation = ["jakarta-annotation-api", "micronaut-inject-java", "micronaut-inject-kotlin"] From 575ed5f67469bc1e8aeca97b3cb0b30cbd76f2dc Mon Sep 17 00:00:00 2001 From: Jonathan Pearlin Date: 
Mon, 30 Sep 2024 16:06:47 -0400 Subject: [PATCH 21/36] refactor: rever convert logging configuration to Logback (#14113) (#14189) --- airbyte-bootloader/build.gradle.kts | 6 - .../src/main/resources/application.yml | 10 +- airbyte-commons-server/build.gradle.kts | 9 +- .../errors/IdNotFoundKnownException.java | 5 +- .../commons/server/errors/KnownException.java | 14 +- .../InvalidInputExceptionHandler.java | 4 +- .../handlers/helpers/ConnectionMatcher.java | 8 +- .../handlers/helpers/DestinationMatcher.java | 6 +- .../handlers/helpers/SourceMatcher.java | 6 +- airbyte-commons-storage/build.gradle.kts | 3 - .../logback/AirbyteCloudStorageAppender.kt | 174 -------- .../logback/AirbyteLogbackCustomConfigurer.kt | 405 ------------------ .../logging/logback/AirbyteLogbackUtils.kt | 39 -- ...irbyteOperationsJobLogbackMessageLayout.kt | 47 -- .../AirbytePlatformLogbackMessageLayout.kt | 126 ------ .../AirbyteStorageMDCBasedDiscriminator.kt | 21 - .../logging/logback/MaskedDataConverter.kt | 156 ------- .../airbyte/commons/storage/StorageClient.kt | 47 -- .../ch.qos.logback.classic.spi.Configurator | 1 - .../AirbyteCloudStorageAppenderTest.kt | 192 --------- .../AirbyteLogbackCustomConfigurerTest.kt | 213 --------- ...teOperationsJobLogbackMessageLayoutTest.kt | 131 ------ ...AirbytePlatformLogbackMessageLayoutTest.kt | 224 ---------- ...AirbyteStorageMDCBasedDiscriminatorTest.kt | 47 -- .../logback/MaskedDataConverterTest.kt | 186 -------- .../storage/StorageClientFactoryTest.kt | 24 +- .../commons/storage/StorageClientTest.kt | 132 +----- .../test/resources/test_spec_secret_mask.yaml | 5 - airbyte-commons-worker/build.gradle.kts | 5 +- .../general/ReplicationWorkerTest.java | 6 - .../VersionedAirbyteStreamFactoryTest.java | 7 + .../io/airbyte/commons/envvar/EnvVar.kt | 12 +- .../io/airbyte/commons/envvar/EnvVarTest.kt | 17 - .../config-persistence/build.gradle.kts | 4 - .../build.gradle.kts | 6 - .../src/main/resources/application.yml | 11 +- .../build.gradle.kts | 7 - 
.../src/main/resources/application.yml | 14 +- .../build.gradle.kts | 9 - .../src/main/resources/application.properties | 1 + .../src/main/resources/application.yml | 14 +- airbyte-connector-sidecar/build.gradle.kts | 6 +- .../src/main/resources/application.yml | 11 - .../build.gradle.kts | 8 +- .../src/main/resources/application.yml | 11 - airbyte-cron/build.gradle.kts | 6 - .../src/main/resources/application.yml | 10 +- airbyte-data/build.gradle.kts | 5 - airbyte-featureflag-server/build.gradle.kts | 9 +- .../src/main/resources/application.yml | 11 - airbyte-keycloak-setup/build.gradle.kts | 7 - .../src/main/resources/application.yml | 11 - airbyte-metrics/reporter/build.gradle.kts | 6 - .../src/main/resources/application.yml | 11 +- airbyte-notification/build.gradle.kts | 6 +- .../notification/SlackNotificationClient.java | 8 +- .../job/tracker/TrackingMetadata.java | 8 +- airbyte-server/build.gradle.kts | 9 +- .../src/main/resources/application.yml | 6 - airbyte-workers/build.gradle.kts | 4 +- .../src/main/resources/application.yml | 11 +- airbyte-workload-api-server/build.gradle.kts | 7 +- .../src/main/resources/application.yml | 18 +- .../build.gradle.kts | 5 - .../src/main/resources/application.yml | 11 - airbyte-workload-launcher/build.gradle.kts | 8 +- .../src/main/resources/application.yml | 6 - deps.toml | 5 - 68 files changed, 101 insertions(+), 2477 deletions(-) delete mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt delete mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt delete mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt delete mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt delete mode 100644 
airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt delete mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt delete mode 100644 airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt delete mode 100644 airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator delete mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt delete mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt delete mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt delete mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt delete mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt delete mode 100644 airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt delete mode 100644 airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml create mode 100644 airbyte-connector-rollout-worker/src/main/resources/application.properties diff --git a/airbyte-bootloader/build.gradle.kts b/airbyte-bootloader/build.gradle.kts index a64ecff2f4d..1ef19396d93 100644 --- a/airbyte-bootloader/build.gradle.kts +++ b/airbyte-bootloader/build.gradle.kts @@ -4,10 +4,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -27,7 +23,6 @@ dependencies { 
implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-micronaut")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-config:init")) implementation(project(":oss:airbyte-config:specs")) implementation(project(":oss:airbyte-config:config-models")) @@ -42,7 +37,6 @@ dependencies { implementation(project(":oss:airbyte-persistence:job-persistence")) runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.annotation.processor) diff --git a/airbyte-bootloader/src/main/resources/application.yml b/airbyte-bootloader/src/main/resources/application.yml index c973bc04829..a41a655a056 100644 --- a/airbyte-bootloader/src/main/resources/application.yml +++ b/airbyte-bootloader/src/main/resources/application.yml @@ -147,11 +147,5 @@ jooq: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-commons-server/build.gradle.kts b/airbyte-commons-server/build.gradle.kts index 99890673170..b5f18a7d2e7 100644 --- a/airbyte-commons-server/build.gradle.kts +++ b/airbyte-commons-server/build.gradle.kts @@ -3,10 +3,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -40,13 +36,12 @@ dependencies { implementation(libs.quartz.scheduler) implementation(libs.temporal.sdk) implementation(libs.swagger.annotations) + implementation(libs.bundles.log4j) implementation(libs.commons.io) - 
implementation(libs.apache.commons.lang) implementation(libs.kotlin.logging) implementation(libs.reactor.core) implementation(libs.jakarta.ws.rs.api) implementation(libs.kubernetes.client) - implementation(libs.guava) implementation(project(":oss:airbyte-analytics")) implementation(project(":oss:airbyte-api:connector-builder-api")) @@ -56,8 +51,8 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-converters")) implementation(project(":oss:airbyte-commons-license")) - implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-storage")) + implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-with-dependencies")) diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java index d5848ab1010..f5bcf007525 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/IdNotFoundKnownException.java @@ -5,6 +5,7 @@ package io.airbyte.commons.server.errors; import io.airbyte.api.model.generated.NotFoundKnownExceptionInfo; +import org.apache.logging.log4j.core.util.Throwables; /** * Thrown when an api input requests an id that does not exist. 
@@ -45,10 +46,10 @@ public NotFoundKnownExceptionInfo getNotFoundKnownExceptionInfo() { final NotFoundKnownExceptionInfo exceptionInfo = new NotFoundKnownExceptionInfo() .exceptionClassName(this.getClass().getName()) .message(this.getMessage()) - .exceptionStack(getStackTraceAsList(this)); + .exceptionStack(Throwables.toStringList(this)); if (this.getCause() != null) { exceptionInfo.rootCauseExceptionClassName(this.getClass().getClass().getName()); - exceptionInfo.rootCauseExceptionStack(getStackTraceAsList(this.getCause())); + exceptionInfo.rootCauseExceptionStack(Throwables.toStringList(this.getCause())); } exceptionInfo.id(this.getId()); return exceptionInfo; diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java index 2d9db718b38..4cffeee75d8 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/KnownException.java @@ -4,12 +4,9 @@ package io.airbyte.commons.server.errors; -import com.google.common.base.Throwables; import io.airbyte.api.model.generated.KnownExceptionInfo; -import java.util.List; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.Stream; +import org.apache.logging.log4j.core.util.Throwables; /** * Exception wrapper to handle formatting API exception outputs nicely. 
@@ -48,11 +45,6 @@ public KnownExceptionInfo getKnownExceptionInfo() { return KnownException.infoFromThrowable(this, details); } - public static List getStackTraceAsList(final Throwable throwable) { - final String[] stackTrace = Throwables.getStackTraceAsString(throwable).split("\n"); - return Stream.of(stackTrace).collect(Collectors.toList()); - } - public static KnownExceptionInfo infoFromThrowableWithMessage(final Throwable t, final String message) { return infoFromThrowableWithMessage(t, message, null); // Call the other static method with null details } @@ -69,11 +61,11 @@ public static KnownExceptionInfo infoFromThrowableWithMessage(final Throwable t, final KnownExceptionInfo exceptionInfo = new KnownExceptionInfo() .exceptionClassName(t.getClass().getName()) .message(message) - .exceptionStack(getStackTraceAsList(t)); + .exceptionStack(Throwables.toStringList(t)); if (t.getCause() != null) { exceptionInfo.rootCauseExceptionClassName(t.getCause().getClass().getName()); - exceptionInfo.rootCauseExceptionStack(getStackTraceAsList(t.getCause())); + exceptionInfo.rootCauseExceptionStack(Throwables.toStringList(t.getCause())); } if (details != null) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java index b3e3f69a212..e19e7ea88d4 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/errors/handlers/InvalidInputExceptionHandler.java @@ -7,7 +7,6 @@ import io.airbyte.api.model.generated.InvalidInputExceptionInfo; import io.airbyte.api.model.generated.InvalidInputProperty; import io.airbyte.commons.json.Jsons; -import io.airbyte.commons.server.errors.KnownException; import io.micronaut.context.annotation.Replaces; import 
io.micronaut.context.annotation.Requires; import io.micronaut.http.HttpRequest; @@ -22,6 +21,7 @@ import jakarta.validation.ConstraintViolationException; import java.util.ArrayList; import java.util.List; +import org.apache.logging.log4j.core.util.Throwables; /** * https://www.baeldung.com/jersey-bean-validation#custom-exception-handler. handles exceptions @@ -53,7 +53,7 @@ public static InvalidInputExceptionInfo infoFromConstraints(final ConstraintViol final InvalidInputExceptionInfo exceptionInfo = new InvalidInputExceptionInfo() .exceptionClassName(cve.getClass().getName()) .message("Some properties contained invalid input.") - .exceptionStack(KnownException.getStackTraceAsList(cve)); + .exceptionStack(Throwables.toStringList(cve)); final List props = new ArrayList(); for (final ConstraintViolation cv : cve.getConstraintViolations()) { diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java index 73fa5e9bb42..d56db348fde 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/ConnectionMatcher.java @@ -6,7 +6,7 @@ import io.airbyte.api.model.generated.ConnectionRead; import io.airbyte.api.model.generated.ConnectionSearch; -import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.util.Strings; /** * Constructs a query for finding a query. @@ -28,13 +28,13 @@ public ConnectionRead match(final ConnectionRead query) { final ConnectionRead fromSearch = new ConnectionRead(); fromSearch.connectionId(search.getConnectionId() == null ? query.getConnectionId() : search.getConnectionId()); fromSearch.destinationId(search.getDestinationId() == null ? 
query.getDestinationId() : search.getDestinationId()); - fromSearch.name(StringUtils.isBlank(search.getName()) ? query.getName() : search.getName()); - fromSearch.namespaceFormat(StringUtils.isBlank(search.getNamespaceFormat()) || "null".equals(search.getNamespaceFormat()) + fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); + fromSearch.namespaceFormat(Strings.isBlank(search.getNamespaceFormat()) || "null".equals(search.getNamespaceFormat()) ? query.getNamespaceFormat() : search.getNamespaceFormat()); fromSearch.namespaceDefinition( search.getNamespaceDefinition() == null ? query.getNamespaceDefinition() : search.getNamespaceDefinition()); - fromSearch.prefix(StringUtils.isBlank(search.getPrefix()) ? query.getPrefix() : search.getPrefix()); + fromSearch.prefix(Strings.isBlank(search.getPrefix()) ? query.getPrefix() : search.getPrefix()); fromSearch.schedule(search.getSchedule() == null ? query.getSchedule() : search.getSchedule()); fromSearch.scheduleType(search.getScheduleType() == null ? query.getScheduleType() : search.getScheduleType()); fromSearch.scheduleData(search.getScheduleData() == null ? 
query.getScheduleData() : search.getScheduleData()); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java index fef914465a6..c59a24e8db9 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/DestinationMatcher.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.api.model.generated.DestinationRead; import io.airbyte.api.model.generated.DestinationSearch; -import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.util.Strings; /** * Constructs a query for finding a query. @@ -28,13 +28,13 @@ public DestinationRead match(final DestinationRead query) { } final DestinationRead fromSearch = new DestinationRead(); - fromSearch.name(StringUtils.isBlank(search.getName()) ? query.getName() : search.getName()); + fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); fromSearch.destinationDefinitionId(search.getDestinationDefinitionId() == null ? query.getDestinationDefinitionId() : search.getDestinationDefinitionId()); fromSearch .destinationId(search.getDestinationId() == null ? query.getDestinationId() : search.getDestinationId()); fromSearch.destinationName( - StringUtils.isBlank(search.getDestinationName()) ? query.getDestinationName() : search.getDestinationName()); + Strings.isBlank(search.getDestinationName()) ? query.getDestinationName() : search.getDestinationName()); fromSearch.workspaceId(search.getWorkspaceId() == null ? 
query.getWorkspaceId() : search.getWorkspaceId()); fromSearch.icon(query.getIcon()); fromSearch.isVersionOverrideApplied(query.getIsVersionOverrideApplied()); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java index cb212c5d5c4..1dee4c333a2 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/helpers/SourceMatcher.java @@ -8,7 +8,7 @@ import com.fasterxml.jackson.databind.node.ObjectNode; import io.airbyte.api.model.generated.SourceRead; import io.airbyte.api.model.generated.SourceSearch; -import org.apache.commons.lang3.StringUtils; +import org.apache.logging.log4j.util.Strings; /** * Constructs a query for finding a query. @@ -28,10 +28,10 @@ public SourceRead match(final SourceRead query) { } final SourceRead fromSearch = new SourceRead(); - fromSearch.name(StringUtils.isBlank(search.getName()) ? query.getName() : search.getName()); + fromSearch.name(Strings.isBlank(search.getName()) ? query.getName() : search.getName()); fromSearch.sourceDefinitionId(search.getSourceDefinitionId() == null ? query.getSourceDefinitionId() : search.getSourceDefinitionId()); fromSearch.sourceId(search.getSourceId() == null ? query.getSourceId() : search.getSourceId()); - fromSearch.sourceName(StringUtils.isBlank(search.getSourceName()) ? query.getSourceName() : search.getSourceName()); + fromSearch.sourceName(Strings.isBlank(search.getSourceName()) ? query.getSourceName() : search.getSourceName()); fromSearch.workspaceId(search.getWorkspaceId() == null ? 
query.getWorkspaceId() : search.getWorkspaceId()); fromSearch.icon(query.getIcon()); fromSearch.isVersionOverrideApplied(query.getIsVersionOverrideApplied()); diff --git a/airbyte-commons-storage/build.gradle.kts b/airbyte-commons-storage/build.gradle.kts index ba858b28410..4fd322bddb7 100644 --- a/airbyte-commons-storage/build.gradle.kts +++ b/airbyte-commons-storage/build.gradle.kts @@ -17,8 +17,6 @@ dependencies { api(libs.aws.java.sdk.sts) api(libs.s3) api(libs.google.cloud.storage) - api(libs.guava) - api(libs.slf4j.api) api(project(":oss:airbyte-commons")) api(project(":oss:airbyte-metrics:metrics-lib")) @@ -28,7 +26,6 @@ dependencies { // TODO: This is deprecated, but required to make the real van logging solution happy. implementation("com.microsoft.azure:azure-storage:8.6.6") implementation(libs.micronaut.inject) - implementation(libs.bundles.logback) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt deleted file mode 100644 index 2794787f19f..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppender.kt +++ /dev/null @@ -1,174 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.AppenderBase -import ch.qos.logback.core.encoder.Encoder -import com.google.common.util.concurrent.ThreadFactoryBuilder -import io.airbyte.commons.envvar.EnvVar -import io.airbyte.commons.storage.AzureStorageClient -import io.airbyte.commons.storage.AzureStorageConfig -import io.airbyte.commons.storage.DocumentType -import io.airbyte.commons.storage.GcsStorageClient -import io.airbyte.commons.storage.GcsStorageConfig -import io.airbyte.commons.storage.LocalStorageClient -import io.airbyte.commons.storage.LocalStorageConfig -import io.airbyte.commons.storage.MinioStorageClient -import io.airbyte.commons.storage.MinioStorageConfig -import io.airbyte.commons.storage.S3StorageClient -import io.airbyte.commons.storage.S3StorageConfig -import io.airbyte.commons.storage.StorageBucketConfig -import io.airbyte.commons.storage.StorageClient -import java.net.InetAddress -import java.time.LocalDateTime -import java.time.format.DateTimeFormatter -import java.util.UUID -import java.util.concurrent.Executors -import java.util.concurrent.LinkedBlockingQueue -import java.util.concurrent.TimeUnit - -/** - * Custom Logback [AppenderBase] that uploads log events to remove storage. Log data - * is uploaded on a scheduled cadence that produces a new remote storage file each time. - * This is necessary because most cloud storage systems do not support an append mode. 
- */ -class AirbyteCloudStorageAppender( - val encoder: Encoder, - val baseStorageId: String, - val documentType: DocumentType, - val storageClient: StorageClient = buildStorageClient(storageConfig = buildStorageConfig(), documentType = documentType), - val period: Long = 60L, - val unit: TimeUnit = TimeUnit.SECONDS, -) : AppenderBase() { - private val buffer = LinkedBlockingQueue() - private val hostname = InetAddress.getLocalHost().hostName - private val executorService = - Executors.newScheduledThreadPool( - 1, - ThreadFactoryBuilder().setNameFormat("airbyte-cloud-storage-appender-%d").build(), - ) - private val uniqueIdentifier = UUID.randomUUID().toString().replace("-", "") - private var currentStorageId: String = composeId() - - override fun start() { - super.start() - executorService.scheduleAtFixedRate(this::upload, period, period, unit) - } - - override fun stop() { - try { - super.stop() - executorService.shutdownNow() - executorService.awaitTermination(30, TimeUnit.SECONDS) - } finally { - // Do one final upload attempt to make sure all logs are published - upload() - } - } - - override fun append(eventObject: ILoggingEvent) { - buffer.offer(encoder.encode(eventObject).decodeToString()) - } - - private fun upload() { - val messages = mutableListOf() - buffer.drainTo(messages) - - if (messages.isNotEmpty()) { - storageClient.write(id = currentStorageId, document = messages.joinToString(separator = "")) - - // Move to next file to avoid overwriting in log storage that doesn't support append mode - this.currentStorageId = composeId() - } - } - - private fun composeId(): String { - // Remove the trailing "/" from the base storage ID if present to avoid duplicates in the storage ID - return "${baseStorageId.trimEnd('/')}/${LocalDateTime.now().format(DATE_FORMAT)}_${hostname}_$uniqueIdentifier" - } -} - -internal fun buildStorageClient( - documentType: DocumentType, - storageConfig: Map, -): StorageClient { - val storageType = 
storageConfig[EnvVar.STORAGE_TYPE] ?: "" - val bucketConfig = buildBucketConfig(storageConfig = storageConfig) - - return when (storageType.lowercase()) { - "azure" -> - AzureStorageClient( - config = - AzureStorageConfig( - buckets = bucketConfig, - connectionString = storageConfig[EnvVar.AZURE_STORAGE_CONNECTION_STRING]!!, - ), - type = documentType, - ) - "gcs" -> - GcsStorageClient( - config = - GcsStorageConfig( - buckets = bucketConfig, - applicationCredentials = storageConfig[EnvVar.GOOGLE_APPLICATION_CREDENTIALS]!!, - ), - type = documentType, - ) - "minio" -> - MinioStorageClient( - config = - MinioStorageConfig( - buckets = bucketConfig, - accessKey = storageConfig[EnvVar.AWS_ACCESS_KEY_ID]!!, - secretAccessKey = storageConfig[EnvVar.AWS_SECRET_ACCESS_KEY]!!, - endpoint = storageConfig[EnvVar.MINIO_ENDPOINT]!!, - ), - type = documentType, - ) - "s3" -> - S3StorageClient( - config = - S3StorageConfig( - buckets = bucketConfig, - accessKey = storageConfig[EnvVar.AWS_ACCESS_KEY_ID]!!, - secretAccessKey = storageConfig[EnvVar.AWS_SECRET_ACCESS_KEY]!!, - region = storageConfig[EnvVar.AWS_DEFAULT_REGION]!!, - ), - type = documentType, - ) - else -> - LocalStorageClient( - config = - LocalStorageConfig( - buckets = bucketConfig, - root = "/tmp/local-storage", - ), - type = documentType, - ) - } -} - -private val DATE_FORMAT: DateTimeFormatter = DateTimeFormatter.ofPattern("yyyyMMddHHmmss") - -internal fun buildBucketConfig(storageConfig: Map): StorageBucketConfig = - StorageBucketConfig( - log = storageConfig[EnvVar.STORAGE_BUCKET_LOG] ?: throw IllegalArgumentException("Missing ${EnvVar.STORAGE_BUCKET_LOG.name} env-var"), - state = "", - workloadOutput = "", - activityPayload = "", - ) - -private fun buildStorageConfig(): Map = - mapOf( - EnvVar.STORAGE_TYPE to EnvVar.STORAGE_TYPE.fetchNotNull(), - EnvVar.STORAGE_BUCKET_LOG to EnvVar.STORAGE_BUCKET_LOG.fetchNotNull(), - EnvVar.AZURE_STORAGE_CONNECTION_STRING to 
EnvVar.AZURE_STORAGE_CONNECTION_STRING.fetchNotNull(), - EnvVar.GOOGLE_APPLICATION_CREDENTIALS to EnvVar.GOOGLE_APPLICATION_CREDENTIALS.fetchNotNull(), - EnvVar.AWS_ACCESS_KEY_ID to EnvVar.AWS_ACCESS_KEY_ID.fetchNotNull(), - EnvVar.AWS_SECRET_ACCESS_KEY to EnvVar.AWS_SECRET_ACCESS_KEY.fetchNotNull(), - EnvVar.AWS_DEFAULT_REGION to EnvVar.AWS_DEFAULT_REGION.fetchNotNull(), - EnvVar.MINIO_ENDPOINT to EnvVar.MINIO_ENDPOINT.fetchNotNull(), - ) diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt deleted file mode 100644 index cfe79a375c1..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurer.kt +++ /dev/null @@ -1,405 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.Level -import ch.qos.logback.classic.LoggerContext -import ch.qos.logback.classic.boolex.JaninoEventEvaluator -import ch.qos.logback.classic.sift.SiftingAppender -import ch.qos.logback.classic.spi.Configurator -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.Appender -import ch.qos.logback.core.ConsoleAppender -import ch.qos.logback.core.Context -import ch.qos.logback.core.FileAppender -import ch.qos.logback.core.Layout -import ch.qos.logback.core.boolex.EventEvaluator -import ch.qos.logback.core.encoder.Encoder -import ch.qos.logback.core.encoder.LayoutWrappingEncoder -import ch.qos.logback.core.filter.EvaluatorFilter -import ch.qos.logback.core.hook.DefaultShutdownHook -import ch.qos.logback.core.rolling.FixedWindowRollingPolicy -import ch.qos.logback.core.rolling.RollingFileAppender -import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy -import ch.qos.logback.core.sift.AppenderFactory -import 
ch.qos.logback.core.sift.Discriminator -import ch.qos.logback.core.spi.ContextAwareBase -import ch.qos.logback.core.spi.FilterReply -import ch.qos.logback.core.util.Duration -import ch.qos.logback.core.util.FileSize -import ch.qos.logback.core.util.StatusPrinter2 -import io.airbyte.commons.envvar.EnvVar -import io.airbyte.commons.logging.DEFAULT_CLOUD_JOB_LOG_PATH_MDC_KEY -import io.airbyte.commons.logging.DEFAULT_CLOUD_WORKSPACE_MDC_KEY -import io.airbyte.commons.logging.DEFAULT_JOB_LOG_PATH_MDC_KEY -import io.airbyte.commons.logging.DEFAULT_LOG_FILENAME -import io.airbyte.commons.logging.DEFAULT_WORKSPACE_MDC_KEY -import io.airbyte.commons.storage.DocumentType -import org.slf4j.Logger.ROOT_LOGGER_NAME -import java.io.File -import java.nio.file.Path -import kotlin.io.path.isDirectory - -/** - * Custom Logback [Configurator] that configures Logback appenders and loggers for use in the platform. This configurator allows us to - * dynamically control the output of each logger and apply any additional logic prior to logging the message. 
- */ -class AirbyteLogbackCustomConfigurer : - ContextAwareBase(), - Configurator { - override fun configure(loggerContext: LoggerContext): Configurator.ExecutionStatus { - // Ensure that the logging context is stopped on application shutdown - registerShutdownHook(loggerContext = loggerContext) - - // Output any configuration errors - StatusPrinter2().printInCaseOfErrorsOrWarnings(loggerContext) - - // Create appenders - val appenders = - listOf( - createPlatformAppender(loggerContext = loggerContext), - createOperationsJobAppender(loggerContext = loggerContext), - createApplicationAppender(loggerContext = loggerContext), - createCloudApplicationAppender(loggerContext = loggerContext), - createCloudOperationsJobAppender(loggerContext = loggerContext), - ) - - // Register appenders with root logger - loggerContext.getLogger(ROOT_LOGGER_NAME).apply { - level = getLogLevel() - isAdditive = true - appenders.forEach { addAppender(it) } - } - - // Do not allow any other configurators to run after this. - // This prevents Logback from creating the default console appender for the root logger. - return Configurator.ExecutionStatus.DO_NOT_INVOKE_NEXT_IF_ANY - } - - /** - * Builds the appender for application log messages. This appender logs all messages to a rolling local file. - * - * @param loggerContext The logging context. - * @return The application appender. - */ - private fun createApplicationAppender(loggerContext: LoggerContext): Appender { - return createSiftingAppender( - appenderFactory = this::createApplicationRollingAppender, - appenderName = APPLICATION_LOGGER_NAME, - contextKey = DEFAULT_WORKSPACE_MDC_KEY, - loggerContext = loggerContext, - ) - } - - /** - * Builds a [RollingFileAppender] for application logs. - * - * @param context The logging context. - * @param discriminatorValue The discriminator value used to select this appender. - * @return A [RollingFileAppender] configured for the application logs. 
- */ - internal fun createApplicationRollingAppender( - context: Context, - discriminatorValue: String, - ): Appender { - val baseFile = "$discriminatorValue/$DEFAULT_LOG_FILENAME" - - // Ensure that the file exists before logging - touchFile(file = baseFile) - - val appender = RollingFileAppender() - appender.context = context - appender.name = "$discriminatorValue-local" - appender.encoder = createEncoder(context = context, layout = AirbytePlatformLogbackMessageLayout()) - appender.file = baseFile - - val triggeringPolicy = SizeBasedTriggeringPolicy() - triggeringPolicy.context = context - triggeringPolicy.maxFileSize = FileSize.valueOf(DEFAULT_MAX_LOG_FILE_SIZE) - triggeringPolicy.start() - - val rollingPolicy = FixedWindowRollingPolicy() - rollingPolicy.context = context - rollingPolicy.fileNamePattern = baseFile.replace(LOG_FILE_EXTENSION, ROLLING_FILE_NAME_PATTERN) - rollingPolicy.maxIndex = 3 - rollingPolicy.setParent(appender) - rollingPolicy.start() - - appender.rollingPolicy = rollingPolicy - appender.triggeringPolicy = triggeringPolicy - appender.start() - return appender - } - - /** - * Builds the cloud appender for application log messages. This appender logs all messages to remote storage. - * - * @param loggerContext The logging context. - * @return The cloud application appender. 
- */ - private fun createCloudApplicationAppender(loggerContext: LoggerContext): Appender { - val appenderFactory = { context: Context, discriminatorValue: String -> - createCloudAppender( - context = context, - discriminatorValue = discriminatorValue, - layout = AirbytePlatformLogbackMessageLayout(), - documentType = DocumentType.APPLICATION_LOGS, - appenderName = CLOUD_APPLICATION_LOGGER_NAME, - ) - } - - return createSiftingAppender( - appenderFactory = appenderFactory, - appenderName = CLOUD_APPLICATION_LOGGER_NAME, - contextKey = DEFAULT_CLOUD_WORKSPACE_MDC_KEY, - loggerContext = loggerContext, - ) - } - - /** - * Builds the appender for cloud operations job log messages. This appender logs all messages to remote storage. - * - * @param loggerContext The logging context. - * @return The cloud operations job appender. - */ - private fun createCloudOperationsJobAppender(loggerContext: LoggerContext): Appender { - val appenderFactory = { context: Context, discriminatorValue: String -> - createCloudAppender( - context = context, - discriminatorValue = discriminatorValue, - layout = AirbyteOperationsJobLogbackMessageLayout(), - documentType = DocumentType.LOGS, - appenderName = CLOUD_OPERATIONS_JOB_LOGGER_NAME, - ) - } - - return createSiftingAppender( - appenderFactory = appenderFactory, - appenderName = CLOUD_OPERATIONS_JOB_LOGGER_NAME, - contextKey = DEFAULT_CLOUD_JOB_LOG_PATH_MDC_KEY, - loggerContext = loggerContext, - ) - } - - /** - * Builds the appender for operations job log messages. This appender logs all messages to a local file. - * - * @param loggerContext The logging context. - * @return The operations job appender. 
- */ - private fun createOperationsJobAppender(loggerContext: LoggerContext): Appender { - return createSiftingAppender( - appenderFactory = this::createOperationsJobFileAppender, - appenderName = OPERATIONS_JOB_LOGGER_NAME, - contextKey = DEFAULT_JOB_LOG_PATH_MDC_KEY, - loggerContext = loggerContext, - ) - } - - /** - * Builds the operations job file appender for operations job log messages. - * - * @param context The logging context. - * @param discriminatorValue The discriminator value used to select this appender. - * @return A [FileAppender] configured for the operations job logs. - */ - internal fun createOperationsJobFileAppender( - context: Context, - discriminatorValue: String, - ): Appender { - val filePath = - if (Path.of(discriminatorValue).isDirectory()) { - Path.of(discriminatorValue, DEFAULT_LOG_FILENAME) - } else { - Path.of(discriminatorValue) - } - - // Ensure that the log file exists - touchFile(file = filePath.toString()) - - val appender = FileAppender() - appender.context = context - appender.encoder = createEncoder(context = context, layout = AirbyteOperationsJobLogbackMessageLayout()) - appender.file = filePath.toString() - appender.name = "$discriminatorValue-local" - appender.start() - return appender - } - - /** - * Builds an [AirbyteCloudStorageAppender] for remote logging of log messages. - * - * @param context The logging context. - * @param discriminatorValue The discriminator value used to select this appender. - * @param documentType The remote storage [DocumentType]. - * @param appenderName The base appender name. - * @param layout The log message [Layout]. - * @return An [AirbyteCloudStorageAppender] used to store logs remotely. 
- */ - internal fun createCloudAppender( - context: Context, - discriminatorValue: String, - documentType: DocumentType, - appenderName: String, - layout: Layout, - ): AirbyteCloudStorageAppender { - val appender = - AirbyteCloudStorageAppender( - encoder = createEncoder(context = context, layout = layout), - baseStorageId = discriminatorValue, - documentType = documentType, - ) - appender.context = context - appender.name = "$appenderName-$discriminatorValue" - appender.start() - return appender - } - - /** - * Builds the appender for platform log messages. This appender logs all messages to the console. - * - * @param loggerContext The logging context. - * @return The platform appender. - */ - internal fun createPlatformAppender(loggerContext: LoggerContext): ConsoleAppender = - ConsoleAppender().apply { - context = loggerContext - encoder = createEncoder(context = loggerContext, layout = AirbytePlatformLogbackMessageLayout()) - name = PLATFORM_LOGGER_NAME - start() - } - - /** - * Builds a [Discriminator] that is used to extract a value from the logging MDC. - * - * @param contextKey The key in the MDC that will be extracted if present and not blank. - * @param loggerContext The logging context. - * @return The [Discriminator]. - */ - private fun createDiscriminator( - contextKey: String, - loggerContext: LoggerContext, - ): Discriminator = - AirbyteStorageMDCBasedDiscriminator(mdcValueExtractor = { mdc -> mdc[contextKey] ?: "" }).apply { - context = loggerContext - start() - } - - /** - * Builds the [Encoder] used to format the logging event message. - * - * @param context The logging [Context]. - * @param layout The logging message [Layout] to be applied to the message. - * @return The [Encoder]. 
- */ - private fun createEncoder( - context: Context, - layout: Layout, - ): Encoder { - layout.context = context - layout.start() - - return LayoutWrappingEncoder().apply { - this.context = context - this.layout = layout - } - } - - /** - * Builds an [EventEvaluator] that tests whether the MDC contains a non-blank value - * for the provided `contextKey`. This evaluator is used to avoid routing logging - * events to the [SiftingAppender] when the event does not contain the required MDC property. - * - * @param contextKey The key in the MDC to be checked. - * @param loggerContext The logging context. - * @return The [EventEvaluator] that checks the provided `contextKey` in the MDC. - */ - private fun createEvaluator( - contextKey: String, - loggerContext: LoggerContext, - ): EventEvaluator = - JaninoEventEvaluator().apply { - context = loggerContext - expression = """mdc.get("$contextKey") == null || mdc.get("$contextKey") == """"" - start() - } - - /** - * Builds an [EvaluatorFilter] that denys matching the logging event - * to the [SiftingAppender] if the provided [EventEvaluator] expression - * returns `true`. This is used to avoid routing events with missing MDC properties - * to the appender. - * - * @param evaluator An [EventEvaluator] to be used by the filter. - * @param loggerContext The logging context. - * @return An [EvaluatorFilter] that denies matches when the provided evaluator results in a `true` comparison. - */ - private fun createFilter( - evaluator: EventEvaluator, - loggerContext: LoggerContext, - ): EvaluatorFilter = - EvaluatorFilter().apply { - context = loggerContext - this.evaluator = evaluator - onMatch = FilterReply.DENY - onMismatch = FilterReply.NEUTRAL - start() - } - - /** - * Builds a [SiftingAppender] that is invoked when the provided `contextKey` is present - * in the MDC. Once created, the appender will expire after disuse to ensure proper cleanup. 
- * - * @param appenderFactory An [AppenderFactory] used to create an appender when the logging event matches the provided filter. - * @param contextKey The key in the MDC that is used to filter logging events. - * @param appenderName The name to apply to the appender. - * @param loggerContext The logging context. - * @return A [SiftingAppender] that creates dynamic appenders based on the value returned by a [Discriminator]. - */ - internal fun createSiftingAppender( - appenderFactory: AppenderFactory, - contextKey: String, - appenderName: String, - loggerContext: LoggerContext, - ): SiftingAppender { - val discriminator = createDiscriminator(contextKey = contextKey, loggerContext = loggerContext) - val evaluator = createEvaluator(contextKey = contextKey, loggerContext = loggerContext) - val filter = createFilter(evaluator = evaluator, loggerContext = loggerContext) - - return SiftingAppender().apply { - setAppenderFactory(appenderFactory) - context = loggerContext - this.discriminator = discriminator - name = appenderName - timeout = Duration.valueOf("$APPENDER_TIMEOUT minutes") - addFilter(filter) - start() - } - } - - /** - * Registers a shutdown hook with the JVM to ensure that the logging context is stopped - * on JVM exit. This ensures that any active appender is stopped, allowing them to - * publish any pending logging events. - * - * @param loggerContext The logging context. 
- */ - private fun registerShutdownHook(loggerContext: LoggerContext) { - val shutdownHook = DefaultShutdownHook().apply { context = loggerContext } - Runtime.getRuntime().addShutdownHook(Thread { shutdownHook.run() }) - } -} - -private const val DEFAULT_APPENDER_TIMEOUT_MIN = "15" -const val DEFAULT_MAX_LOG_FILE_SIZE = "100MB" -const val LOG_FILE_EXTENSION = ".log" -const val ROLLING_FILE_NAME_PATTERN = ".%i$LOG_FILE_EXTENSION.gz" -val APPENDER_TIMEOUT = EnvVar.LOG_IDLE_ROUTE_TTL.fetchNotNull(default = DEFAULT_APPENDER_TIMEOUT_MIN) - -private fun getLogLevel(): Level = Level.toLevel(EnvVar.LOG_LEVEL.fetchNotNull(default = Level.INFO.toString())) - -private fun touchFile(file: String) { - File(file).createNewFile() -} diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt deleted file mode 100644 index 29b16709b53..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackUtils.kt +++ /dev/null @@ -1,39 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -/** - * The qualified class name of the calling code for logging purposes. This - * key should be added to the MDC when the point of logging is not the point - * in the code that should be recorded by the logger layout. - */ -const val CALLER_QUALIFIED_CLASS_NAME_PATTERN = "CALLER_FQCN" - -/** - * The line number of the calling code for logging purposes. This - * key should be added to the MDC when the point of logging is not the point - * in the code that should be recorded by the logger layout. - */ -const val CALLER_LINE_NUMBER_PATTERN = "CALLER_LINE_NUMBER" - -/** - * The method name of the calling code for logging purposes. 
This - * key should be added to the MDC when the point of logging is not the point - * in the code that should be recorded by the logger layout. - */ -const val CALLER_METHOD_NAME_PATTERN = "CALLER_METHOD_NAME" - -/** - * The thread name of the calling code for logging purposes. This - * key should be added to the MDC when the point of logging is not the point - * in the code that should be recorded by the logger layout. - */ -const val CALLER_THREAD_NAME_PATTERN = "CALLER_THREAD_NAME" - -const val APPLICATION_LOGGER_NAME = "airbyte-application-logger" -const val CLOUD_APPLICATION_LOGGER_NAME = "airbyte-cloud-application-logger" -const val CLOUD_OPERATIONS_JOB_LOGGER_NAME = "airbyte-cloud-operations-job-logger" -const val OPERATIONS_JOB_LOGGER_NAME = "airbyte-operations-job-logger" -const val PLATFORM_LOGGER_NAME = "airbyte-platform-logger" diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt deleted file mode 100644 index 0b33ff8e465..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayout.kt +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.pattern.ThrowableProxyConverter -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR -import ch.qos.logback.core.LayoutBase -import io.airbyte.commons.logging.LoggingHelper -import io.airbyte.commons.logging.LoggingHelper.LOG_SOURCE_MDC_KEY -import java.time.Instant -import java.time.ZoneId -import java.time.format.DateTimeFormatter - -/** - * Custom Logback message layout that formats the message for operations job log messages. 
- */ -class AirbyteOperationsJobLogbackMessageLayout : LayoutBase() { - private val throwableConverter = ThrowableProxyConverter() - private val maskedDataConverter = MaskedDataConverter() - - init { - throwableConverter.start() - } - - override fun doLayout(loggingEvent: ILoggingEvent): String { - val logSource = loggingEvent.mdcPropertyMap.getOrDefault(LOG_SOURCE_MDC_KEY, LoggingHelper.platformLogSource()) - - return buildString { - append( - Instant.ofEpochMilli(loggingEvent.timeStamp).atZone(UTC_ZONE_ID).format(EVENT_TIMESTAMP_FORMATTER), - ) - append(" ") - append("$logSource > ") - append(maskedDataConverter.convert(event = loggingEvent)) - loggingEvent.throwableProxy?.let { - append("$LINE_SEPARATOR${throwableConverter.convert(loggingEvent)}") - } - append(LINE_SEPARATOR) - } - } -} - -private val UTC_ZONE_ID = ZoneId.of("UTC") -private val EVENT_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd HH:mm:ss") diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt deleted file mode 100644 index a4b2b29aae3..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayout.kt +++ /dev/null @@ -1,126 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.Level -import ch.qos.logback.classic.pattern.ClassOfCallerConverter -import ch.qos.logback.classic.pattern.LineOfCallerConverter -import ch.qos.logback.classic.pattern.MethodOfCallerConverter -import ch.qos.logback.classic.pattern.ThreadConverter -import ch.qos.logback.classic.pattern.ThrowableProxyConverter -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.CoreConstants.DASH_CHAR -import ch.qos.logback.core.CoreConstants.ISO8601_PATTERN -import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR -import ch.qos.logback.core.CoreConstants.TAB -import ch.qos.logback.core.LayoutBase -import ch.qos.logback.core.pattern.color.ANSIConstants -import ch.qos.logback.core.pattern.color.ANSIConstants.DEFAULT_FG -import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_END -import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_START -import ch.qos.logback.core.pattern.color.ANSIConstants.RESET -import java.time.Instant -import java.time.ZoneId -import java.time.format.DateTimeFormatter - -/** - * Custom Logback message layout that formats the message for platform log messages (e.g. STDOUT). 
- */ -class AirbytePlatformLogbackMessageLayout : LayoutBase() { - private val classOfCallerConverter = ClassOfCallerConverter() - private val lineOfCallerConverter = LineOfCallerConverter() - private val methodOfCallerConverter = MethodOfCallerConverter() - private val threadConverter = ThreadConverter() - private val throwableConverter = ThrowableProxyConverter() - private val maskedDataConverter = MaskedDataConverter() - private val ciMode = System.getProperty(CI_MODE_SYSTEM_PROPERTY, "false").toBoolean() - - init { - throwableConverter.start() - } - - override fun doLayout(loggingEvent: ILoggingEvent): String = - buildString { - append( - Instant.ofEpochMilli(loggingEvent.timeStamp).atZone(UTC_ZONE_ID).format(EVENT_TIMESTAMP_FORMATTER), - ) - - append(" ") - - /* - * Add DataDog trace/span ID's to log messages if CI mode is enabled and the log - * message is not for the job log. - */ - if (ciMode) { - append( - "[dd.trace_id=${loggingEvent.mdcPropertyMap[DATADOG_TRACE_ID_KEY]} " + - "dd.span_id=${loggingEvent.mdcPropertyMap[DATADOG_SPAN_ID_KEY]}] ", - ) - } - - append("[") - if (loggingEvent.mdcPropertyMap.containsKey(CALLER_THREAD_NAME_PATTERN)) { - append(loggingEvent.mdcPropertyMap[CALLER_THREAD_NAME_PATTERN]) - } else { - append(threadConverter.convert(loggingEvent)) - } - append("]$TAB") - append("$ESC_START${getHighlightColor(loggingEvent = loggingEvent)}$ESC_END${loggingEvent.level}$DEFAULT_COLOR$TAB") - if (loggingEvent.mdcPropertyMap.containsKey(CALLER_QUALIFIED_CLASS_NAME_PATTERN)) { - append( - "${formatClassName(loggingEvent.mdcPropertyMap[CALLER_QUALIFIED_CLASS_NAME_PATTERN])}" + - "(${loggingEvent.mdcPropertyMap[CALLER_METHOD_NAME_PATTERN]}):" + - "${loggingEvent.mdcPropertyMap[CALLER_LINE_NUMBER_PATTERN]} $DASH_CHAR ", - ) - } else { - append( - "${formatClassName(classOfCallerConverter.convert(loggingEvent))}(${methodOfCallerConverter.convert(loggingEvent)})" + - ":${lineOfCallerConverter.convert(loggingEvent)} $DASH_CHAR ", - ) - } - 
append(maskedDataConverter.convert(event = loggingEvent)) - loggingEvent.throwableProxy?.let { - append("$LINE_SEPARATOR${throwableConverter.convert(loggingEvent)}") - } - append(LINE_SEPARATOR) - } -} - -internal const val DEFAULT_COLOR = ESC_START + RESET + DEFAULT_FG + ESC_END -internal const val CI_MODE_SYSTEM_PROPERTY = "ciMode" -internal const val DATADOG_SPAN_ID_KEY = "dd.span_id" -internal const val DATADOG_TRACE_ID_KEY = "dd.trace_id" -private val EVENT_TIMESTAMP_FORMATTER = DateTimeFormatter.ofPattern(ISO8601_PATTERN) -private val UTC_ZONE_ID = ZoneId.of("UTC") - -/** - * Formats the fully qualified class name to mimic the same behavior as the ``{1.}`` option - * in a Log4j pattern layout format string. - * - * @param className a fully qualified class name - * @returns The formatted fully qualified class name. - */ -internal fun formatClassName(className: String?): String? { - return className?.let { - val parts = className.split('.') - return "${parts.subList(0, parts.size - 1).joinToString(".") { s -> s.substring(0, 1) }}.${parts.last()}" - } -} - -/** - * Returns the appropriate highlight color based on the level associated with the provided logging event. - * This method is adapted from [ch.qos.logback.classic.pattern.color.HighlightingCompositeConverter] used - * by Logback to color levels in log output. - * - * @param loggingEvent The logging event that contains the log level. - * @return The ANSI color code associated with the log level. 
- */ -private fun getHighlightColor(loggingEvent: ILoggingEvent): String = - when (loggingEvent.level.toInt()) { - Level.ERROR_INT -> ANSIConstants.BOLD + ANSIConstants.RED_FG - Level.WARN_INT -> ANSIConstants.RED_FG - Level.INFO_INT -> ANSIConstants.BLUE_FG - else -> DEFAULT_FG - } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt deleted file mode 100644 index fe88ddb8d64..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminator.kt +++ /dev/null @@ -1,21 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.sift.AbstractDiscriminator - -/** - * Custom Logback [ch.qos.logback.core.sift.Discriminator] implementation that uses the - * job log path MDC value as a discriminator for appender creation. - */ -class AirbyteStorageMDCBasedDiscriminator( - private val mdcValueExtractor: (Map) -> String, -) : AbstractDiscriminator() { - // Not implemented/not used. - override fun getKey(): String = "" - - override fun getDiscriminatingValue(event: ILoggingEvent): String = mdcValueExtractor(event.mdcPropertyMap) -} diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt deleted file mode 100644 index 544ef7b328a..00000000000 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverter.kt +++ /dev/null @@ -1,156 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.pattern.ClassicConverter -import ch.qos.logback.classic.spi.ILoggingEvent -import com.fasterxml.jackson.core.type.TypeReference -import io.airbyte.commons.constants.AirbyteCatalogConstants.LOCAL_SECRETS_MASKS_PATH -import io.airbyte.commons.constants.AirbyteSecretConstants -import io.airbyte.commons.yaml.Yamls -import java.nio.charset.Charset -import java.util.regex.Pattern - -/** - * Custom [ClassicConverter] used to intercept all log messages and mask any JSON - * properties in the message that match the list of maskable properties. - *

- * The maskable properties file is generated by a Gradle task in the - * {@code :oss:airbyte-config:specs} project. The file is named {@code specs_secrets_mask.yaml} and - * is located in the {@code src/main/resources/seed} directory of the - * {@code :oss:airbyte-config:init} project. - */ -class MaskedDataConverter( - specMaskFile: String = LOCAL_SECRETS_MASKS_PATH, -) : ClassicConverter() { - private val pattern: Pattern? = buildPattern(specMaskFile = specMaskFile) - private val replacePattern = API_KEY_PATTERN.toPattern() - - override fun convert(event: ILoggingEvent): String = replace(message = applyMask(message = event.formattedMessage)) - - /** - * Applies the mask to the message, if necessary. - * - * @param message The log message. - * @return The possibly masked log message. - */ - private fun applyMask(message: String): String { - val piiScrubbedMessage = removeKnownPii(message) - return pattern?.matcher(piiScrubbedMessage)?.replaceAll(REPLACEMENT_PATTERN) ?: piiScrubbedMessage - } - - /** - * Builds the maskable property matching pattern. - * - * @param specMaskFile The spec mask file. - * @return The regular expression pattern used to find maskable properties. - */ - private fun buildPattern(specMaskFile: String): Pattern? = - getMaskableProperties(specMaskFile).takeIf { it.isNotEmpty() }?.let { - generatePattern(it).toPattern() - } - - /** - * Generates the property matching pattern string from the provided set of properties. - * - * @param properties The set of properties to match. - * @return The generated regular expression pattern used to match the maskable properties. - */ - private fun generatePattern(properties: Set): String = - buildString { - append(CASE_INSENSITIVE_FLAG) - append(PROPERTY_MATCHING_PATTERN_PREFIX) - append(properties.joinToString("|")) - append(PROPERTY_MATCHING_PATTERN_SUFFIX) - } - - /** - * Loads the maskable properties from the provided file. - * - * @param specMaskFile The spec mask file. 
- * @return The set of maskable properties. - */ - private fun getMaskableProperties(specMaskFile: String): Set { - return runCatching { - val maskFileContents = - javaClass.getResourceAsStream(specMaskFile)?.readBytes()?.toString(Charset.defaultCharset()) - ?: return setOf() - - val properties: Map> = - Yamls.deserialize(maskFileContents, object : TypeReference>>() {}) - - properties.getOrDefault(PROPERTIES_KEY, setOf()) - }.getOrDefault(setOf()) - } - - /** - * Code-based implementation of the `replace(message){r, t}` macro in Logback/Log4j configuration. - * - * @param message The message to apply replacement to. - * @return The potentially modified message with any speciric patterns replaced. - */ - private fun replace(message: String): String = replacePattern.matcher(message).replaceAll(API_KEY_REPLACEMENT) -} - -/** - * Regular expression to match api keys in strings. Ported from previous Log4j2 configuration. - */ -private const val API_KEY_PATTERN = """apikey=[\w\-]*""" - -/** - * Replacement pattern for matches using the [API_KEY_PATTERN] regular expression. - */ -private const val API_KEY_REPLACEMENT = "apikey=${AirbyteSecretConstants.SECRETS_MASK}" - -/** - * Regular expression pattern flag that enables case in-sensitive matching. - */ -private const val CASE_INSENSITIVE_FLAG: String = "(?i)" - -// This is a little circuitous, but it gets the regex syntax highlighting in intelliJ to work. -private val DESTINATION_ERROR_PREFIX: String = """^(?.*destination.*\s+>\s+ERROR.+)""".toPattern().pattern() - -/** - * Regular expression replacement pattern for applying the mask to PII log messages. 
- */ -private const val KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN: String = "\${destinationPrefix}\${messagePrefix}${AirbyteSecretConstants.SECRETS_MASK}" - -private val KNOWN_PII_PATTERNS: List = - listOf( - """$DESTINATION_ERROR_PREFIX(?Received\s+invalid\s+message:)(.+)$""".toPattern(), - """$DESTINATION_ERROR_PREFIX(?org\.jooq\.exception\.DataAccessException: SQL.+values\s+\()(.+)$""".toPattern(), - ) - -/** - * Name of the key in the mask YAML file that contains the list of maskable properties. - */ -private const val PROPERTIES_KEY: String = "properties" - -/** - * Regular expression pattern prefix for applying the mask to property values. - */ -private const val PROPERTY_MATCHING_PATTERN_PREFIX: String = """"(""" - -/** - * Regular expression pattern suffix for applying the mask to property values. - */ -private const val PROPERTY_MATCHING_PATTERN_SUFFIX: String = """)"\s*:\s*("(?:[^"\\]|\\.)*"|\[[^]\[]*]|\d+)""" - -/** - * Regular expression pattern used to replace a key/value property with a masked value while - * maintaining the property key/name. - */ -private const val REPLACEMENT_PATTERN: String = """"$1":"${AirbyteSecretConstants.SECRETS_MASK}"""" - -/** - * Removes known PII from the message. 
- * - * @param message the log line - * @return a redacted log line - */ -private fun removeKnownPii(message: String): String = - KNOWN_PII_PATTERNS.fold(message) { msg, pattern -> - pattern.matcher(msg).replaceAll(KNOWN_PII_LOG_MESSAGE_REPLACEMENT_PATTERN) - } diff --git a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt index b7a79e944b1..10118b5e14b 100644 --- a/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt +++ b/airbyte-commons-storage/src/main/kotlin/io/airbyte/commons/storage/StorageClient.kt @@ -9,7 +9,6 @@ import com.azure.storage.blob.BlobServiceClientBuilder import com.google.auth.oauth2.ServiceAccountCredentials import com.google.cloud.storage.BlobId import com.google.cloud.storage.BlobInfo -import com.google.cloud.storage.BucketInfo import com.google.cloud.storage.Storage import com.google.cloud.storage.StorageOptions import com.google.common.annotations.VisibleForTesting @@ -25,10 +24,8 @@ import software.amazon.awssdk.auth.credentials.AwsBasicCredentials import software.amazon.awssdk.core.sync.RequestBody import software.amazon.awssdk.regions.Region import software.amazon.awssdk.services.s3.S3Client -import software.amazon.awssdk.services.s3.model.CreateBucketRequest import software.amazon.awssdk.services.s3.model.DeleteObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectRequest -import software.amazon.awssdk.services.s3.model.HeadBucketRequest import software.amazon.awssdk.services.s3.model.HeadObjectRequest import software.amazon.awssdk.services.s3.model.NoSuchKeyException import software.amazon.awssdk.services.s3.model.PutObjectRequest @@ -71,7 +68,6 @@ class StorageClientFactory( enum class DocumentType( val prefix: Path, ) { - APPLICATION_LOGS(prefix = Path.of("/app-logging")), LOGS(prefix = Path.of("/job-logging")), STATE(prefix = Path.of("/state")), 
WORKLOAD_OUTPUT(prefix = Path.of("/workload/output")), @@ -132,10 +128,6 @@ class AzureStorageClient( @Parameter type: DocumentType, ) : this(config = config, type = type, azureClient = config.azureClient()) - init { - runCatching { createBucketIfNotExists() } - } - override fun write( id: String, document: String, @@ -162,13 +154,6 @@ class AzureStorageClient( .deleteIfExists() internal fun key(id: String): String = "${type.prefix}/$id" - - private fun createBucketIfNotExists() { - val blobContainerClient = azureClient.getBlobContainerClient(bucketName) - if (!blobContainerClient.exists()) { - blobContainerClient.createIfNotExists() - } - } } /** @@ -192,10 +177,6 @@ class GcsStorageClient( @Parameter type: DocumentType, ) : this(config = config, type = type, gcsClient = config.gcsClient()) - init { - runCatching { createBucketIfNotExists() } - } - override fun write( id: String, document: String, @@ -219,12 +200,6 @@ class GcsStorageClient( @VisibleForTesting internal fun blobId(id: String): BlobId = BlobId.of(bucketName, key(id)) - - private fun createBucketIfNotExists() { - if (gcsClient.get(bucketName) == null) { - gcsClient.create(BucketInfo.of(bucketName)) - } - } } /** @@ -319,10 +294,6 @@ abstract class AbstractS3StorageClient internal constructor( ) : StorageClient { private val bucketName = config.bucketName(type) - init { - runCatching { createBucketIfNotExists() } - } - override fun write( id: String, document: String, @@ -377,23 +348,6 @@ abstract class AbstractS3StorageClient internal constructor( } internal fun key(id: String): String = "${type.prefix}/$id" - - private fun createBucketIfNotExists() { - if (!doesBucketExist(bucketName=bucketName)) { - val createBucketRequest = CreateBucketRequest.builder().bucket(bucketName).build() - s3Client.createBucket(createBucketRequest) - } - } - - private fun doesBucketExist(bucketName: String): Boolean { - val headBucketRequest = HeadBucketRequest.builder().bucket(bucketName).build() - return try { - 
s3Client.headBucket(headBucketRequest) - true - } catch (e: Exception) { - false - } - } } /** @@ -463,7 +417,6 @@ fun StorageConfig.bucketName(type: DocumentType): String = when (type) { DocumentType.STATE -> this.buckets.state DocumentType.WORKLOAD_OUTPUT -> this.buckets.workloadOutput - DocumentType.APPLICATION_LOGS -> this.buckets.log DocumentType.LOGS -> this.buckets.log DocumentType.ACTIVITY_PAYLOADS -> this.buckets.activityPayload } diff --git a/airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator b/airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator deleted file mode 100644 index 2ed89deda99..00000000000 --- a/airbyte-commons-storage/src/main/resources/META-INF/services/ch.qos.logback.classic.spi.Configurator +++ /dev/null @@ -1 +0,0 @@ -io.airbyte.commons.logging.logback.AirbyteLogbackCustomConfigurer \ No newline at end of file diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt deleted file mode 100644 index 1e31a1d54dd..00000000000 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteCloudStorageAppenderTest.kt +++ /dev/null @@ -1,192 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.Context -import ch.qos.logback.core.encoder.Encoder -import ch.qos.logback.core.status.Status -import ch.qos.logback.core.status.StatusManager -import io.airbyte.commons.envvar.EnvVar -import io.airbyte.commons.resources.MoreResources -import io.airbyte.commons.storage.AzureStorageClient -import io.airbyte.commons.storage.DocumentType -import io.airbyte.commons.storage.GcsStorageClient -import io.airbyte.commons.storage.LocalStorageClient -import io.airbyte.commons.storage.MinioStorageClient -import io.airbyte.commons.storage.S3StorageClient -import io.airbyte.commons.storage.StorageClient -import io.mockk.every -import io.mockk.mockk -import io.mockk.verify -import org.junit.jupiter.api.AfterEach -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import java.nio.file.Files -import java.util.concurrent.TimeUnit -import kotlin.io.path.Path - -private class AirbyteCloudStorageAppenderTest { - @AfterEach - fun tearDown() { - Files.newDirectoryStream(Path("."), "*.log").use { stream -> - stream.forEach { Files.deleteIfExists(it) } - } - } - - @Test - fun testBuildBucketConfig() { - val bucket = "test-bucket" - val storageConfig = - mapOf( - EnvVar.STORAGE_BUCKET_LOG to bucket, - ) - val bucketConfig = buildBucketConfig(storageConfig) - assertEquals(bucket, bucketConfig.log) - assertEquals("", bucketConfig.state) - assertEquals("", bucketConfig.workloadOutput) - assertEquals("", bucketConfig.activityPayload) - } - - @Test - fun testBuildAzureStorageClient() { - val bucket = "test-bucket" - val connectionString = "AccountName=test;AccountKey=test-key" - val storageType = "azure" - val storageConfig = - mapOf( - EnvVar.STORAGE_TYPE to storageType, - EnvVar.STORAGE_BUCKET_LOG to bucket, - EnvVar.AZURE_STORAGE_CONNECTION_STRING to connectionString, - ) - val client = buildStorageClient(storageConfig = 
storageConfig, documentType = DocumentType.LOGS) - assertEquals(AzureStorageClient::class.java, client.javaClass) - } - - @Test - fun testBuildGcsStorageClient() { - val bucket = "test-bucket" - val applicationCredentials = MoreResources.readResourceAsFile("sample_gcs_credentials.json") - val credentials = applicationCredentials.path - val storageType = "gcs" - val storageConfig = - mapOf( - EnvVar.STORAGE_TYPE to storageType, - EnvVar.STORAGE_BUCKET_LOG to bucket, - EnvVar.GOOGLE_APPLICATION_CREDENTIALS to credentials, - ) - val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) - assertEquals(GcsStorageClient::class.java, client.javaClass) - } - - @Test - fun testBuildMinioStorageClient() { - val bucket = "test-bucket" - val accessKey = "test_access_key" - val accessSecretKey = "test_access_secret_key" - val endpoint = "test-endpoint:9000" - val storageType = "minio" - val storageConfig = - mapOf( - EnvVar.STORAGE_TYPE to storageType, - EnvVar.STORAGE_BUCKET_LOG to bucket, - EnvVar.AWS_ACCESS_KEY_ID to accessKey, - EnvVar.AWS_SECRET_ACCESS_KEY to accessSecretKey, - EnvVar.MINIO_ENDPOINT to endpoint, - ) - val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) - assertEquals(MinioStorageClient::class.java, client.javaClass) - } - - @Test - fun testBuildS3StorageClient() { - val bucket = "test-bucket" - val accessKey = "test_access_key" - val accessSecretKey = "test_access_secret_key" - val region = "US-EAST-1" - val storageType = "s3" - val storageConfig = - mapOf( - EnvVar.STORAGE_TYPE to storageType, - EnvVar.STORAGE_BUCKET_LOG to bucket, - EnvVar.AWS_ACCESS_KEY_ID to accessKey, - EnvVar.AWS_SECRET_ACCESS_KEY to accessSecretKey, - EnvVar.AWS_DEFAULT_REGION to region, - ) - val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) - assertEquals(S3StorageClient::class.java, client.javaClass) - } - - @Test - fun testBuildLocalStorageClient() { 
- val bucket = "test-bucket" - val storageType = "local" - val storageConfig = - mapOf( - EnvVar.STORAGE_TYPE to storageType, - EnvVar.STORAGE_BUCKET_LOG to bucket, - ) - val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) - assertEquals(LocalStorageClient::class.java, client.javaClass) - } - - @Test - fun testBuildDefaultStorageClient() { - val bucket = "test-bucket" - val storageType = "unknown" - val storageConfig = - mapOf( - EnvVar.STORAGE_TYPE to storageType, - EnvVar.STORAGE_BUCKET_LOG to bucket, - ) - val client = buildStorageClient(storageConfig = storageConfig, documentType = DocumentType.LOGS) - assertEquals(LocalStorageClient::class.java, client.javaClass) - - val storageConfig2 = - mapOf( - EnvVar.STORAGE_BUCKET_LOG to bucket, - ) - val client2 = buildStorageClient(storageConfig = storageConfig2, documentType = DocumentType.LOGS) - assertEquals(LocalStorageClient::class.java, client2.javaClass) - } - - @Test - fun testStorageUpload() { - val baseStorageId = "/path/to/logs" - val storageClient = mockk() - val event = mockk() - val period = 1L - val statusManager = - mockk { - every { add(any()) } returns Unit - } - val context = - mockk { - every { getStatusManager() } returns statusManager - } - val encoder = - mockk> { - every { encode(any()) } returns "some test log message".toByteArray(Charsets.UTF_8) - } - - val appender = - AirbyteCloudStorageAppender( - documentType = DocumentType.LOGS, - storageClient = storageClient, - baseStorageId = baseStorageId, - encoder = encoder, - period = period, - unit = TimeUnit.SECONDS, - ) - appender.context = context - appender.start() - - appender.doAppend(event) - - Thread.sleep(TimeUnit.SECONDS.toMillis(period * 2)) - - verify(exactly = 1) { storageClient.write(any(), any()) } - } -} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt 
b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt deleted file mode 100644 index baa8004f2f6..00000000000 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteLogbackCustomConfigurerTest.kt +++ /dev/null @@ -1,213 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.LoggerContext -import ch.qos.logback.classic.sift.SiftingAppender -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.core.Context -import ch.qos.logback.core.FileAppender -import ch.qos.logback.core.OutputStreamAppender -import ch.qos.logback.core.encoder.LayoutWrappingEncoder -import ch.qos.logback.core.rolling.FixedWindowRollingPolicy -import ch.qos.logback.core.rolling.RollingFileAppender -import ch.qos.logback.core.rolling.SizeBasedTriggeringPolicy -import ch.qos.logback.core.sift.AppenderFactory -import ch.qos.logback.core.status.Status -import ch.qos.logback.core.status.StatusManager -import ch.qos.logback.core.util.Duration -import ch.qos.logback.core.util.FileSize -import io.airbyte.commons.logging.DEFAULT_LOG_FILENAME -import io.airbyte.commons.storage.DocumentType -import io.mockk.every -import io.mockk.mockk -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Assertions.assertTrue -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test -import java.nio.file.Files -import java.nio.file.Path -import kotlin.io.path.exists -import kotlin.io.path.pathString - -private class AirbyteLogbackCustomConfigurerTest { - private lateinit var configurer: AirbyteLogbackCustomConfigurer - - @BeforeEach - fun setUp() { - configurer = AirbyteLogbackCustomConfigurer() - } - - @Test - fun testCreateApplicationRollingAppender() { - val context = - mockk { - every { getObject(any()) } returns mutableMapOf() - every { statusManager } returns 
- mockk { - every { add(any()) } returns Unit - } - } - val discriminatorValue = Files.createTempDirectory("test-1").pathString - val appender = configurer.createApplicationRollingAppender(context = context, discriminatorValue = discriminatorValue) - - assertEquals(RollingFileAppender::class.java, appender.javaClass) - assertEquals(context, appender.context) - assertEquals("$discriminatorValue-local", appender.name) - assertEquals( - AirbytePlatformLogbackMessageLayout::class.java, - ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, - ) - assertEquals("$discriminatorValue/$DEFAULT_LOG_FILENAME", (appender as RollingFileAppender).file) - - assertEquals(FixedWindowRollingPolicy::class.java, appender.rollingPolicy.javaClass) - assertEquals( - "$discriminatorValue/$DEFAULT_LOG_FILENAME".replace(LOG_FILE_EXTENSION, ROLLING_FILE_NAME_PATTERN), - (appender.rollingPolicy as FixedWindowRollingPolicy).fileNamePattern, - ) - assertEquals(3, (appender.rollingPolicy as FixedWindowRollingPolicy).maxIndex) - - assertEquals(SizeBasedTriggeringPolicy::class.java, appender.triggeringPolicy.javaClass) - assertEquals(FileSize.valueOf(DEFAULT_MAX_LOG_FILE_SIZE), (appender.triggeringPolicy as SizeBasedTriggeringPolicy).maxFileSize) - - assertTrue(appender.isStarted) - assertTrue(Path.of(appender.file).exists()) - } - - @Test - fun testCreateOperationsJobFileAppender() { - val context = - mockk { - every { getObject(any()) } returns mutableMapOf() - every { statusManager } returns - mockk { - every { add(any()) } returns Unit - } - } - val discriminatorValue = Files.createTempDirectory("test-2").pathString - val appender = configurer.createOperationsJobFileAppender(context = context, discriminatorValue = discriminatorValue) - - assertEquals(FileAppender::class.java, appender.javaClass) - assertEquals(context, appender.context) - assertEquals("$discriminatorValue/$DEFAULT_LOG_FILENAME", (appender as FileAppender).file) - 
assertEquals("$discriminatorValue-local", appender.name) - assertEquals( - AirbyteOperationsJobLogbackMessageLayout::class.java, - ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, - ) - - assertTrue(appender.isStarted) - assertTrue(Path.of(appender.file).exists()) - } - - @Test - fun testCreateOperationsJobFileAppenderWithFileDiscriminator() { - val context = - mockk { - every { getObject(any()) } returns mutableMapOf() - every { statusManager } returns - mockk { - every { add(any()) } returns Unit - } - } - val discriminatorValue = Files.createTempFile("test-2", "other.log").pathString - val appender = configurer.createOperationsJobFileAppender(context = context, discriminatorValue = discriminatorValue) - - assertEquals(FileAppender::class.java, appender.javaClass) - assertEquals(context, appender.context) - assertEquals(discriminatorValue, (appender as FileAppender).file) - assertEquals("$discriminatorValue-local", appender.name) - assertEquals( - AirbyteOperationsJobLogbackMessageLayout::class.java, - ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, - ) - - assertTrue(appender.isStarted) - assertTrue(Path.of(appender.file).exists()) - } - - @Test - fun testCreatePlatformConsoleAppender() { - val context = - mockk { - every { getObject(any()) } returns mutableMapOf() - every { statusManager } returns - mockk { - every { add(any()) } returns Unit - } - } - val appender = configurer.createPlatformAppender(loggerContext = context) - assertEquals(context, appender.context) - assertEquals(PLATFORM_LOGGER_NAME, appender.name) - assertEquals( - AirbytePlatformLogbackMessageLayout::class.java, - ((appender as OutputStreamAppender).encoder as LayoutWrappingEncoder).layout.javaClass, - ) - - assertTrue(appender.isStarted) - } - - @Test - fun testCreateAirbyteCloudStorageAppender() { - val context = - mockk { - every { getObject(any()) } returns mutableMapOf() - every { statusManager } returns - 
mockk { - every { add(any()) } returns Unit - } - } - val appenderName = "test-appender" - val discriminatorValue = "/workspace/1" - val documentType = DocumentType.LOGS - val layout = AirbytePlatformLogbackMessageLayout() - val appender = - configurer.createCloudAppender( - context = context, - discriminatorValue = discriminatorValue, - documentType = documentType, - appenderName = appenderName, - layout = layout, - ) - - assertEquals(AirbyteCloudStorageAppender::class.java, appender.javaClass) - assertEquals(context, appender.context) - assertEquals("$appenderName-$discriminatorValue", appender.name) - assertEquals(documentType, appender.documentType) - assertEquals(layout.javaClass, (appender.encoder as LayoutWrappingEncoder).layout.javaClass) - assertEquals(discriminatorValue, appender.baseStorageId) - - assertTrue(appender.isStarted) - } - - @Test - fun testCreateSiftingAppender() { - val loggerContext = - mockk { - every { getObject(any()) } returns mutableMapOf() - every { statusManager } returns - mockk { - every { add(any()) } returns Unit - } - } - val appenderFactory = mockk>() - val appenderName = "test-appender" - val contextKey = "test-context-key" - val appender = - configurer.createSiftingAppender( - appenderFactory = appenderFactory, - contextKey = contextKey, - appenderName = appenderName, - loggerContext = loggerContext, - ) - - assertEquals(SiftingAppender::class.java, appender.javaClass) - assertEquals(loggerContext, appender.context) - assertEquals(appenderName, appender.name) - assertEquals(Duration.valueOf("$APPENDER_TIMEOUT minutes").milliseconds, appender.timeout.milliseconds) - - assertTrue(appender.isStarted) - } -} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt deleted file mode 100644 index e2db0c4316d..00000000000 --- 
a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteOperationsJobLogbackMessageLayoutTest.kt +++ /dev/null @@ -1,131 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.Level -import ch.qos.logback.classic.pattern.ThrowableProxyConverter -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.classic.spi.ThrowableProxy -import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR -import io.airbyte.commons.constants.AirbyteSecretConstants.SECRETS_MASK -import io.airbyte.commons.logging.LoggingHelper -import io.airbyte.commons.logging.LoggingHelper.LOG_SOURCE_MDC_KEY -import io.airbyte.commons.logging.LoggingHelper.SOURCE_LOGGER_PREFIX -import io.mockk.every -import io.mockk.mockk -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Test -import java.util.UUID - -private class AirbyteOperationsJobLogbackMessageLayoutTest { - @Test - fun testLogMessage() { - val logSource = LoggingHelper.applyColor(LoggingHelper.Color.BLUE_BACKGROUND, SOURCE_LOGGER_PREFIX) - val context = mapOf(LOG_SOURCE_MDC_KEY to logSource) - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val logMessage = "test message" - val logThreadName = "Test Thread" - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns OPERATIONS_JOB_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns null - every { timeStamp } returns timestamp - } - - val layout = AirbyteOperationsJobLogbackMessageLayout() - val message = layout.doLayout(loggingEvent 
= event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00 ") - expected.append(logSource) - expected.append(" > $logMessage$LINE_SEPARATOR") - assertEquals(expected.toString(), message) - } - - @Test - fun testLogMessageWithMaskedData() { - val logSource = LoggingHelper.applyColor(LoggingHelper.Color.BLUE_BACKGROUND, SOURCE_LOGGER_PREFIX) - val context = mapOf(LOG_SOURCE_MDC_KEY to logSource) - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val apiKey = UUID.randomUUID().toString() - val logMessage = "test message (\"api_token\":\"$apiKey\")" - val logThreadName = "Test Thread" - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns OPERATIONS_JOB_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns null - every { timeStamp } returns timestamp - } - - val layout = AirbyteOperationsJobLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00 ") - expected.append(logSource) - expected.append(" > ${logMessage.replace(apiKey, SECRETS_MASK)}$LINE_SEPARATOR") - assertEquals(expected.toString(), message) - } - - @Test - fun testLogMessageWithException() { - val throwableConverter = ThrowableProxyConverter() - throwableConverter.start() - - val logSource = LoggingHelper.applyColor(LoggingHelper.Color.BLUE_BACKGROUND, SOURCE_LOGGER_PREFIX) - val context = mapOf(LOG_SOURCE_MDC_KEY to logSource) - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val logMessage = 
"test message" - val logThreadName = "Test Thread" - val timestamp = 0L - val exception = RuntimeException("test", NullPointerException("root")) - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns OPERATIONS_JOB_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns ThrowableProxy(exception) - every { timeStamp } returns timestamp - } - - val layout = AirbyteOperationsJobLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00 ") - expected.append(logSource) - expected.append(" > $logMessage") - expected.append("$LINE_SEPARATOR${throwableConverter.convert(event)}") - expected.append(LINE_SEPARATOR) - assertEquals(expected.toString(), message) - } -} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt deleted file mode 100644 index c91336931eb..00000000000 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbytePlatformLogbackMessageLayoutTest.kt +++ /dev/null @@ -1,224 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. 
- */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.Level -import ch.qos.logback.classic.pattern.ThrowableProxyConverter -import ch.qos.logback.classic.spi.ILoggingEvent -import ch.qos.logback.classic.spi.ThrowableProxy -import ch.qos.logback.core.CoreConstants.DASH_CHAR -import ch.qos.logback.core.CoreConstants.LINE_SEPARATOR -import ch.qos.logback.core.CoreConstants.TAB -import ch.qos.logback.core.pattern.color.ANSIConstants -import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_END -import ch.qos.logback.core.pattern.color.ANSIConstants.ESC_START -import io.airbyte.commons.constants.AirbyteSecretConstants.SECRETS_MASK -import io.mockk.every -import io.mockk.mockk -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test -import java.util.UUID - -private class AirbytePlatformLogbackMessageLayoutTest { - @BeforeEach - fun setup() { - System.setProperty(CI_MODE_SYSTEM_PROPERTY, "false") - } - - @Test - fun testCiModeLogMessage() { - System.setProperty(CI_MODE_SYSTEM_PROPERTY, "true") - val spanId = UUID.randomUUID().toString() - val traceId = UUID.randomUUID().toString() - val context = - mapOf( - DATADOG_SPAN_ID_KEY to spanId, - DATADOG_TRACE_ID_KEY to traceId, - ) - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val logMessage = "test message" - val logThreadName = "Test Thread" - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns PLATFORM_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns null - every { timeStamp } returns timestamp - } - - val layout = 
AirbytePlatformLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00,000 [dd.trace_id=$traceId dd.span_id=$spanId] ") - expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") - expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") - expected.append("$logMessage$LINE_SEPARATOR") - assertEquals(expected.toString(), message) - } - - @Test - fun tesLogMessage() { - val context = emptyMap() - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val logMessage = "test message" - val logThreadName = "Test Thread" - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns PLATFORM_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns null - every { timeStamp } returns timestamp - } - - val layout = AirbytePlatformLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00,000 ") - expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") - expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") - expected.append("$logMessage$LINE_SEPARATOR") - assertEquals(expected.toString(), message) - } - - @Test - fun tesLogMessageWithMaskedData() { - val context = emptyMap() - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val apiKey = 
UUID.randomUUID().toString() - val logMessage = "test message (\"api_token\":\"$apiKey\")" - val logThreadName = "Test Thread" - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns PLATFORM_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns null - every { timeStamp } returns timestamp - } - - val layout = AirbytePlatformLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00,000 ") - expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") - expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") - expected.append("${logMessage.replace(apiKey, SECRETS_MASK)}$LINE_SEPARATOR") - assertEquals(expected.toString(), message) - } - - @Test - fun tesLogMessageWithException() { - val throwableConverter = ThrowableProxyConverter() - throwableConverter.start() - - val context = emptyMap() - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.ERROR - val logMessage = "test message" - val logThreadName = "Test Thread" - val exception = RuntimeException("test", NullPointerException("root")) - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns PLATFORM_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns ThrowableProxy(exception) - 
every { timeStamp } returns timestamp - } - - val layout = AirbytePlatformLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00,000 ") - expected.append("[$logThreadName]$TAB$ESC_START${ANSIConstants.BOLD + ANSIConstants.RED_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") - expected.append("${formatClassName(className)}($methodName):$lineNumber $DASH_CHAR ") - expected.append(logMessage) - expected.append("$LINE_SEPARATOR${throwableConverter.convert(event)}") - expected.append(LINE_SEPARATOR) - assertEquals(expected.toString(), message) - } - - @Test - fun tesLogMessageWithCallerContext() { - val callerClassName = "io.airbyte.CallerTestClass" - val callerMethodName = "callerTestMethod" - val callerLineNumber = "999" - val callerThreadName = "Caller Test Thread" - val context = - mapOf( - CALLER_QUALIFIED_CLASS_NAME_PATTERN to callerClassName, - CALLER_METHOD_NAME_PATTERN to callerMethodName, - CALLER_LINE_NUMBER_PATTERN to callerLineNumber, - CALLER_THREAD_NAME_PATTERN to callerThreadName, - ) - val className = "io.airbyte.TestClass" - val methodName = "testMethod" - val fileName = "TestClass.kt" - val lineNumber = 12345 - val logLevel = Level.INFO - val logMessage = "test message" - val logThreadName = "Test Thread" - val timestamp = 0L - val event = - mockk { - every { callerData } returns arrayOf(StackTraceElement(className, methodName, fileName, lineNumber)) - every { formattedMessage } returns logMessage - every { level } returns logLevel - every { loggerName } returns PLATFORM_LOGGER_NAME - every { mdcPropertyMap } returns context - every { threadName } returns logThreadName - every { throwableProxy } returns null - every { timeStamp } returns timestamp - } - - val layout = AirbytePlatformLogbackMessageLayout() - val message = layout.doLayout(loggingEvent = event) - - val expected = StringBuilder() - expected.append("1970-01-01 00:00:00,000 ") - 
expected.append("[$callerThreadName]$TAB$ESC_START${ANSIConstants.BLUE_FG}$ESC_END${logLevel}$DEFAULT_COLOR$TAB") - expected.append("${formatClassName(callerClassName)}($callerMethodName):$callerLineNumber $DASH_CHAR ") - expected.append("$logMessage$LINE_SEPARATOR") - assertEquals(expected.toString(), message) - } -} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt deleted file mode 100644 index 9ca44ca52dd..00000000000 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/AirbyteStorageMDCBasedDiscriminatorTest.kt +++ /dev/null @@ -1,47 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.spi.ILoggingEvent -import io.airbyte.commons.logging.DEFAULT_JOB_LOG_PATH_MDC_KEY -import io.mockk.every -import io.mockk.mockk -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test - -private class AirbyteStorageMDCBasedDiscriminatorTest { - private lateinit var discriminator: AirbyteStorageMDCBasedDiscriminator - - @BeforeEach - fun setup() { - discriminator = AirbyteStorageMDCBasedDiscriminator(mdcValueExtractor = { mdc -> mdc[DEFAULT_JOB_LOG_PATH_MDC_KEY] ?: "" }) - } - - @Test - fun testLoggingEventWithPathInContext() { - val jobPath = "/some/job/path" - val context = mapOf(DEFAULT_JOB_LOG_PATH_MDC_KEY to jobPath) - val loggingEvent = - mockk { - every { mdcPropertyMap } returns context - } - - val discriminatorValue = discriminator.getDiscriminatingValue(event = loggingEvent) - assertEquals(jobPath, discriminatorValue) - } - - @Test - fun testLoggingEventWithEmptyContext() { - val context = emptyMap() - val loggingEvent = - mockk { - every { mdcPropertyMap } returns 
context - } - - val discriminatorValue = discriminator.getDiscriminatingValue(event = loggingEvent) - assertEquals("", discriminatorValue) - } -} diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt deleted file mode 100644 index 89a9c1a6acb..00000000000 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/logging/logback/MaskedDataConverterTest.kt +++ /dev/null @@ -1,186 +0,0 @@ -/* - * Copyright (c) 2020-2024 Airbyte, Inc., all rights reserved. - */ - -package io.airbyte.commons.logging.logback - -import ch.qos.logback.classic.spi.ILoggingEvent -import io.airbyte.commons.constants.AirbyteSecretConstants -import io.airbyte.commons.json.Jsons -import io.mockk.every -import io.mockk.mockk -import org.junit.jupiter.api.Assertions -import org.junit.jupiter.api.Assertions.assertEquals -import org.junit.jupiter.api.Assertions.assertFalse -import org.junit.jupiter.api.Assertions.assertNotEquals -import org.junit.jupiter.api.Assertions.assertTrue -import org.junit.jupiter.api.BeforeEach -import org.junit.jupiter.api.Test - -private class MaskedDataConverterTest { - private lateinit var converter: MaskedDataConverter - - @BeforeEach - fun setup() { - converter = MaskedDataConverter(specMaskFile = TEST_SPEC_SECRET_MASK_YAML) - } - - @Test - fun testMaskingMessageWithStringSecret() { - val loggingEvent = - mockk { - every { formattedMessage } returns JSON_WITH_STRING_SECRETS - } - - val result = converter.convert(event = loggingEvent) - - val json = Jsons.deserialize(result) - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()) - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()) - } - - @Test - fun testMaskingMessageWithStringSecretWithQuotes() { - val loggingEvent = - mockk { - every { formattedMessage } returns 
JSON_WITH_STRING_WITH_QUOTE_SECRETS - } - - val result = converter.convert(event = loggingEvent) - - val json = Jsons.deserialize(result) - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()) - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()) - } - - @Test - fun testMaskingMessageWithNumberSecret() { - val loggingEvent = - mockk { - every { formattedMessage } returns JSON_WITH_NUMBER_SECRETS - } - - val result = converter.convert(event = loggingEvent) - - val json = Jsons.deserialize(result) - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(FOO).asText()) - assertEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("bar").asText()) - } - - @Test - fun testMaskingMessageWithWithoutSecrets() { - val loggingEvent = - mockk { - every { formattedMessage } returns JSON_WITHOUT_SECRETS - } - - val result = converter.convert(event = loggingEvent) - - val json = Jsons.deserialize(result) - assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json["prop1"].asText()) - assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("prop2").asText()) - assertNotEquals(AirbyteSecretConstants.SECRETS_MASK, json.get(OTHER).get("prop3").asText()) - } - - @Test - fun testMaskingMessageThatDoesNotMatchPattern() { - val message = "This is some log message that doesn't match the pattern." 
- val loggingEvent = - mockk { - every { formattedMessage } returns message - } - - val result = converter.convert(event = loggingEvent) - - assertFalse(result.contains(AirbyteSecretConstants.SECRETS_MASK)) - assertEquals(message, result) - } - - @Test - fun testMissingMaskingFileDoesNotPreventLogging() { - val logEvent = - mockk { - every { formattedMessage } returns JSON_WITHOUT_SECRETS - } - - Assertions.assertDoesNotThrow { - val converter = MaskedDataConverter(specMaskFile = "/does_not_exist.yaml") - val result = converter.convert(event = logEvent) - assertEquals(JSON_WITHOUT_SECRETS, result) - } - } - - @Test - fun testMaskingMessageWithSqlValues() { - val loggingEvent = - mockk { - every { formattedMessage } returns TEST_LOGGED_SQL_VALUES - } - - val result = converter.convert(event = loggingEvent) - - assertEquals(REDACTED_LOGGED_SQL_VALUES, result) - } - - @Test - fun testMaskingMessageWithRecordContents() { - val loggingEvent = - mockk { - every { formattedMessage } returns TEST_LOGGED_RECORD_CONTENTS - } - - val result = converter.convert(event = loggingEvent) - - assertEquals(REDACTED_LOGGED_RECORD_CONTENTS, result) - } - - @Test - fun testMaskingPlainTextLogLine() { - val message = "500 Server Error: Internal Server Error for url: https://localhost/api/v1/search?limit=100&archived=false&hapikey=secret-key_1" - val loggingEvent = - mockk { - every { formattedMessage } returns message - } - val result = converter.convert(event = loggingEvent) - assertFalse(result.contains("apikey=secret-key_1")) - assertTrue(result.contains("apikey=${AirbyteSecretConstants.SECRETS_MASK}")) - } - - companion object { - private const val FOO: String = "foo" - private const val OTHER: String = "other" - private const val JSON_WITH_STRING_SECRETS = "{\"$FOO\":\"test\",\"$OTHER\":{\"prop\":\"value\",\"bar\":\"1234\"}}" - private const val JSON_WITH_NUMBER_SECRETS = "{\"$FOO\":\"test\",\"$OTHER\":{\"prop\":\"value\",\"bar\":1234}}" - private const val 
JSON_WITH_STRING_WITH_QUOTE_SECRETS = "{\"$FOO\":\"\\\"test\\\"\",\"$OTHER\":{\"prop\":\"value\",\"bar\":\"1234\"}}" - private const val JSON_WITHOUT_SECRETS = "{\"prop1\":\"test\",\"$OTHER\":{\"prop2\":\"value\",\"prop3\":1234}}" - private const val REDACTED_LOGGED_SQL_VALUES = - ( - "2024-03-19 20:03:43 \u001B[43mdestination\u001B[0m > ERROR pool-4-thread-1 i.a.c.i.d.a.FlushWorkers(flush\$lambda$6):192 " + - "Flush Worker (632c9) -- flush worker " + - "error: java.lang.RuntimeException: org.jooq.exception.DataAccessException: SQL [insert into " + - "\"airbyte_internal\".\"public_raw__stream_foo\" (_airbyte_raw_id, _airbyte_data, _airbyte_meta, _airbyte_extracted_at, " + - "_airbyte_loaded_at) values (${AirbyteSecretConstants.SECRETS_MASK}" - ) - private const val REDACTED_LOGGED_RECORD_CONTENTS: String = - ( - "2024-03-21 12:19:08 \u001B[43mdestination\u001B[0m > ERROR i.a.c.i.b.Destination" + - "\$ShimToSerializedAirbyteMessageConsumer(consumeMessage):120" + - " Received invalid message:${AirbyteSecretConstants.SECRETS_MASK}" - ) - private const val TEST_LOGGED_SQL_VALUES: String = - ( - "2024-03-19 20:03:43 \u001B[43mdestination\u001B[0m > ERROR pool-4-thread-1 " + - "i.a.c.i.d.a.FlushWorkers(flush\$lambda\$6):192 Flush Worker (632c9) -- flush worker " + - "error: java.lang.RuntimeException: org.jooq.exception.DataAccessException: SQL [insert into " + - "\"airbyte_internal\".\"public_raw__stream_foo\" (_airbyte_raw_id, _airbyte_data, _airbyte_meta, _airbyte_extracted_at, " + - "_airbyte_loaded_at) values ('UUID', a bunch of other stuff" - ) - private const val TEST_LOGGED_RECORD_CONTENTS: String = - ( - "2024-03-21 12:19:08 \u001B[43mdestination\u001B[0m > ERROR " + - "i.a.c.i.b.Destination\$ShimToSerializedAirbyteMessageConsumer(consumeMessage):120 " + - "Received invalid message: {\"type\":\"RECORD\",\"record\":{\"namespace\":\"" - ) - private const val TEST_SPEC_SECRET_MASK_YAML = "/test_spec_secret_mask.yaml" - } -} diff --git 
a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt index 09f41441d11..971474d4455 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientFactoryTest.kt @@ -4,8 +4,6 @@ package io.airbyte.commons.storage -import com.google.cloud.storage.Bucket -import com.google.cloud.storage.BucketInfo import com.google.cloud.storage.Storage import io.micronaut.context.annotation.Bean import io.micronaut.context.annotation.Primary @@ -19,10 +17,6 @@ import org.junit.jupiter.api.Assertions.assertEquals import org.junit.jupiter.api.Assertions.assertTrue import org.junit.jupiter.api.Test import software.amazon.awssdk.services.s3.S3Client -import software.amazon.awssdk.services.s3.model.CreateBucketRequest -import software.amazon.awssdk.services.s3.model.CreateBucketResponse -import software.amazon.awssdk.services.s3.model.HeadBucketRequest -import software.amazon.awssdk.services.s3.model.NoSuchBucketException /** * Note @MockBean doesn't work in this class for some reason, possible due to a Micronaut 3 problem. 
@@ -72,11 +66,7 @@ class GcsStorageClientFactoryTest { every { applicationCredentials } returns "mock-app-creds" } - val gcsClient: Storage = - mockk { - every { get(any(), *anyVararg()) } returns null - every { create(any()) } returns mockk() - } + val gcsClient: Storage = mockk() init { mockkStatic(GcsStorageConfig::gcsClient) @@ -107,11 +97,7 @@ class MinioStorageClientFactoryTest { every { endpoint } returns "mock-endpoint" } - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() init { mockkStatic(MinioStorageConfig::s3Client) @@ -142,11 +128,7 @@ class S3StorageClientFactoryTest { every { region } returns "mock-region" } - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() init { mockkStatic(S3StorageConfig::s3Client) diff --git a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt index 711a61259a7..9ff5ea3ff17 100644 --- a/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt +++ b/airbyte-commons-storage/src/test/kotlin/io/airbyte/commons/storage/StorageClientTest.kt @@ -6,12 +6,10 @@ package io.airbyte.commons.storage import com.azure.core.util.BinaryData import com.azure.storage.blob.BlobClient -import com.azure.storage.blob.BlobContainerClient import com.azure.storage.blob.BlobServiceClient import com.google.cloud.storage.Blob import com.google.cloud.storage.BlobId import com.google.cloud.storage.BlobInfo -import com.google.cloud.storage.BucketInfo import com.google.cloud.storage.Storage import io.mockk.every import io.mockk.mockk @@ -26,20 +24,15 @@ import org.junit.jupiter.api.io.TempDir import 
software.amazon.awssdk.core.ResponseBytes import software.amazon.awssdk.core.sync.RequestBody import software.amazon.awssdk.services.s3.S3Client -import software.amazon.awssdk.services.s3.model.CreateBucketRequest -import software.amazon.awssdk.services.s3.model.CreateBucketResponse import software.amazon.awssdk.services.s3.model.DeleteObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectRequest import software.amazon.awssdk.services.s3.model.GetObjectResponse -import software.amazon.awssdk.services.s3.model.HeadBucketRequest import software.amazon.awssdk.services.s3.model.HeadObjectRequest -import software.amazon.awssdk.services.s3.model.NoSuchBucketException import software.amazon.awssdk.services.s3.model.NoSuchKeyException import software.amazon.awssdk.services.s3.model.PutObjectRequest import java.io.InputStream import java.nio.charset.StandardCharsets import java.nio.file.Path -import com.google.cloud.storage.Bucket as GcsBucket private const val KEY = "a" private const val DOC1 = "hello" @@ -62,11 +55,6 @@ class AzureStorageClientTest { @Test fun `key matches`() { val azureClient: BlobServiceClient = mockk() - val blobContainerClient: BlobContainerClient = mockk() - - every { azureClient.getBlobContainerClient(any()) } returns blobContainerClient - every { blobContainerClient.exists() } returns false - every { blobContainerClient.createIfNotExists() } returns true val clientState = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) assertEquals("/state/$KEY", clientState.key(KEY)) @@ -78,12 +66,6 @@ class AzureStorageClientTest { @Test fun `read missing doc`() { val azureClient: BlobServiceClient = mockk() - val blobContainerClient: BlobContainerClient = mockk() - - every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient - every { blobContainerClient.exists() } returns false - every { blobContainerClient.createIfNotExists() } returns true - val 
client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns @@ -100,12 +82,6 @@ class AzureStorageClientTest { @Test fun `read existing doc`() { val azureClient: BlobServiceClient = mockk() - val blobContainerClient: BlobContainerClient = mockk() - - every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient - every { blobContainerClient.exists() } returns false - every { blobContainerClient.createIfNotExists() } returns true - val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns @@ -129,19 +105,15 @@ class AzureStorageClientTest { @Test fun `write doc`() { val azureClient: BlobServiceClient = mockk() - val blobClient: BlobClient = mockk() - val blobContainerClient: BlobContainerClient = mockk() + var blobClient: BlobClient = mockk() + val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) - every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient - every { blobContainerClient.exists() } returns false - every { blobContainerClient.createIfNotExists() } returns true + every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns + mockk { every { getBlobClient(client.key(KEY)) } returns blobClient } every { blobClient.exists() } returns true every { blobClient.upload(any()) } returns Unit - val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) - every { blobContainerClient.getBlobClient(client.key(KEY)) } returns blobClient - client.write(KEY, DOC1) verify { blobClient.upload(any()) } } @@ -149,12 +121,6 @@ class AzureStorageClientTest { @Test fun `delete doc`() { val 
azureClient: BlobServiceClient = mockk() - val blobContainerClient: BlobContainerClient = mockk() - - every { azureClient.getBlobContainerClient(config.bucketName(DocumentType.STATE)) } returns blobContainerClient - every { blobContainerClient.exists() } returns false - every { blobContainerClient.createIfNotExists() } returns true - val client = AzureStorageClient(config = config, type = DocumentType.STATE, azureClient = azureClient) // doc not deleted @@ -184,11 +150,7 @@ class GcsStorageClientTest { @Test fun `blobId matches`() { - val gcsClient: Storage = - mockk { - every { get(any(), *anyVararg()) } returns null - every { create(any()) } returns mockk() - } + val gcsClient: Storage = mockk() val clientState = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) assertEquals(BlobId.of(buckets.state, "/state/$KEY"), clientState.blobId(KEY)) @@ -199,11 +161,7 @@ class GcsStorageClientTest { @Test fun `read missing doc`() { - val gcsClient: Storage = - mockk { - every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null - every { create(any()) } returns mockk() - } + val gcsClient: Storage = mockk() val client = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) // verify no blob is returned @@ -220,11 +178,7 @@ class GcsStorageClientTest { @Test fun `read existing doc`() { - val gcsClient: Storage = - mockk { - every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null - every { create(any()) } returns mockk() - } + val gcsClient: Storage = mockk() val client = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) val blobId = client.blobId(KEY) @@ -242,11 +196,7 @@ class GcsStorageClientTest { @Test fun `write doc`() { - val gcsClient: Storage = - mockk { - every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null - every { create(any()) } returns mockk() - } + val gcsClient: Storage = mockk() val client = 
GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) val blobId = client.blobId(KEY) @@ -260,11 +210,7 @@ class GcsStorageClientTest { @Test fun `delete doc`() { - val gcsClient: Storage = - mockk { - every { get(config.bucketName(DocumentType.STATE), *anyVararg()) } returns null - every { create(any()) } returns mockk() - } + val gcsClient: Storage = mockk() val client = GcsStorageClient(config = config, type = DocumentType.STATE, gcsClient = gcsClient) val blobId = client.blobId(KEY) @@ -317,11 +263,7 @@ class MinioStorageClientTest { @Test fun `key matches`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val clientState = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) assertEquals("/state/$KEY", clientState.key(KEY)) @@ -332,11 +274,7 @@ class MinioStorageClientTest { @Test fun `read missing doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -355,11 +293,7 @@ class MinioStorageClientTest { @Test fun `read existing doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -382,11 +316,7 @@ class MinioStorageClientTest { @Test fun `write doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val 
s3Client: S3Client = mockk() val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -404,11 +334,7 @@ class MinioStorageClientTest { @Test fun `delete doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val client = MinioStorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val existsRequest = @@ -442,11 +368,7 @@ class S3StorageClientTest { @Test fun `key matches`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val clientState = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) assertEquals("/state/$KEY", clientState.key(KEY)) @@ -457,11 +379,7 @@ class S3StorageClientTest { @Test fun `read missing doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -480,11 +398,7 @@ class S3StorageClientTest { @Test fun `read existing doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -507,11 +421,7 @@ class S3StorageClientTest { @Test fun `write doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val 
s3Client: S3Client = mockk() val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val request = @@ -529,11 +439,7 @@ class S3StorageClientTest { @Test fun `delete doc`() { - val s3Client: S3Client = - mockk { - every { createBucket(any()) } returns mockk() - every { headBucket(any()) } throws NoSuchBucketException.builder().build() - } + val s3Client: S3Client = mockk() val client = S3StorageClient(config = config, type = DocumentType.STATE, s3Client = s3Client) val existsRequest = diff --git a/airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml b/airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml deleted file mode 100644 index 5a10d04d808..00000000000 --- a/airbyte-commons-storage/src/test/resources/test_spec_secret_mask.yaml +++ /dev/null @@ -1,5 +0,0 @@ ---- -properties: - - foo - - bar - - baz diff --git a/airbyte-commons-worker/build.gradle.kts b/airbyte-commons-worker/build.gradle.kts index 25e582f25b1..30bb3d5e552 100644 --- a/airbyte-commons-worker/build.gradle.kts +++ b/airbyte-commons-worker/build.gradle.kts @@ -3,9 +3,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -36,6 +33,7 @@ dependencies { implementation(libs.bundles.datadog) implementation(libs.commons.io) implementation(libs.bundles.apache) + implementation(libs.bundles.log4j) implementation(libs.failsafe.okhttp) implementation(libs.google.cloud.storage) implementation(libs.okhttp) @@ -96,7 +94,6 @@ dependencies { testImplementation(libs.assertj.core) testImplementation(libs.junit.pioneer) testImplementation(libs.mockk) - testImplementation(libs.bundles.logback) testRuntimeOnly(libs.junit.jupiter.engine) testRuntimeOnly(libs.javax.databind) diff --git 
a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java index f48b9b9de61..bcd5038d6c2 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/general/ReplicationWorkerTest.java @@ -4,7 +4,6 @@ package io.airbyte.workers.general; -import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_JOB_LOG_PATH_MDC_KEY; import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_LOG_FILENAME; import static io.airbyte.commons.logging.LogMdcHelperKt.DEFAULT_WORKSPACE_MDC_KEY; import static io.airbyte.metrics.lib.OssMetricsRegistry.WORKER_DESTINATION_ACCEPT_TIMEOUT; @@ -47,7 +46,6 @@ import io.airbyte.commons.json.Jsons; import io.airbyte.commons.logging.LocalLogMdcHelper; import io.airbyte.commons.logging.LogMdcHelper; -import io.airbyte.commons.logging.LoggingHelper; import io.airbyte.commons.string.Strings; import io.airbyte.config.ConfigSchema; import io.airbyte.config.FailureReason; @@ -312,9 +310,6 @@ void setup() throws Exception { destinationCatalogGenerator = mock(DestinationCatalogGenerator.class); when(destinationCatalogGenerator.generateDestinationCatalog(any())) .thenReturn(new DestinationCatalogGenerator.CatalogGenerationResult(destinationConfig.getCatalog(), Map.of())); - - MDC.put(DEFAULT_JOB_LOG_PATH_MDC_KEY, jobRoot.toString()); - MDC.put(LoggingHelper.LOG_SOURCE_MDC_KEY, LoggingHelper.platformLogSource()); } @AfterEach @@ -703,7 +698,6 @@ void testLoggingInThreads() throws IOException, WorkerException { @Test void testLogMaskRegex() throws IOException { - MDC.clear(); final Path jobRoot = Files.createTempDirectory(Path.of("/tmp"), "mdc_test"); MDC.put(DEFAULT_WORKSPACE_MDC_KEY, jobRoot.toString()); diff --git a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java 
b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java index bc775f25a33..3f8904f7239 100644 --- a/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java +++ b/airbyte-commons-worker/src/test/java/io/airbyte/workers/internal/VersionedAirbyteStreamFactoryTest.java @@ -10,6 +10,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import io.airbyte.commons.json.Jsons; import io.airbyte.commons.logging.MdcScope.Builder; @@ -41,6 +42,7 @@ import java.util.stream.Collectors; import java.util.stream.Stream; import org.assertj.core.api.Assertions; +import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Nested; @@ -72,6 +74,11 @@ void setup() { logger = spy(LoggerFactory.getLogger(VersionedAirbyteStreamFactoryTest.class)); } + @AfterEach() + void afterEach() { + verifyNoMoreInteractions(logger); + } + @Test void testValid() { final AirbyteMessage record1 = AirbyteMessageUtils.createRecordMessage(STREAM_NAME, FIELD_NAME, "green"); diff --git a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt index 40adeb8bcf4..6aac766195a 100644 --- a/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt +++ b/airbyte-commons/src/main/kotlin/io/airbyte/commons/envvar/EnvVar.kt @@ -68,7 +68,6 @@ enum class EnvVar { LOCAL_DOCKER_MOUNT, LOCAL_ROOT, LOG4J_CONFIGURATION_FILE, - LOG_IDLE_ROUTE_TTL, LOG_LEVEL, METRIC_CLIENT, @@ -113,19 +112,10 @@ enum class EnvVar { ; /** - * Fetch the value of this [EnvVar], returning [default] if the value is null or an empty string. 
+ * Fetch the value of this [EnvVar], returning [default] if the value is null or an empty string * * @param default value to return if this environment variable is null or empty */ @JvmOverloads fun fetch(default: String? = null): String? = System.getenv(this.name).takeUnless { it.isNullOrBlank() } ?: default - - /** - * Fetch the value of this [EnvVar], returning a non-null [default] if the value is null or an empty string. - * - * @param default value to return if this environment variable is null or empty - * - * If kotlin contracts ever become stable, this method could be replaced with a contract on the [fetch] method. - */ - fun fetchNotNull(default: String = ""): String = System.getenv(this.name).takeUnless { it.isNullOrBlank() } ?: default } diff --git a/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt b/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt index de4afb7764e..1148d2ec5a5 100644 --- a/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt +++ b/airbyte-commons/src/test/kotlin/io/airbyte/commons/envvar/EnvVarTest.kt @@ -27,21 +27,4 @@ class EnvVarTest { fun `fetch returns null when unset and no default defined`() { assertNull(EnvVar.Z_TESTING_PURPOSES_ONLY_3.fetch()) } - - @Test - fun `fetchNotNull returns the correct value when set`() { - assertEquals("value-defined", EnvVar.Z_TESTING_PURPOSES_ONLY_1.fetchNotNull()) - assertEquals("value-defined", EnvVar.Z_TESTING_PURPOSES_ONLY_1.fetchNotNull(default = "not this value")) - } - - @Test - fun `fetchNotNull returns the default value if missing or blank`() { - val default = "defined as blank, so should return this value instead" - assertEquals(default, EnvVar.Z_TESTING_PURPOSES_ONLY_2.fetchNotNull(default = default)) - } - - @Test - fun `fetchNotNull returns empty string when unset and no default defined`() { - assertEquals("", EnvVar.Z_TESTING_PURPOSES_ONLY_3.fetchNotNull()) - } } diff --git 
a/airbyte-config/config-persistence/build.gradle.kts b/airbyte-config/config-persistence/build.gradle.kts index fc1aa8d7f65..b16d1f260e9 100644 --- a/airbyte-config/config-persistence/build.gradle.kts +++ b/airbyte-config/config-persistence/build.gradle.kts @@ -8,10 +8,6 @@ configurations.all { exclude(group = "io.micronaut.flyway") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut diff --git a/airbyte-connector-builder-server/build.gradle.kts b/airbyte-connector-builder-server/build.gradle.kts index c93f9bc24a5..571fe6f032c 100644 --- a/airbyte-connector-builder-server/build.gradle.kts +++ b/airbyte-connector-builder-server/build.gradle.kts @@ -9,10 +9,6 @@ plugins { id("io.airbyte.gradle.kube-reload") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { // Micronaut dependencies annotationProcessor(platform(libs.micronaut.platform)) @@ -48,7 +44,6 @@ dependencies { implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-server")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-worker")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-config:config-persistence")) @@ -64,7 +59,6 @@ dependencies { implementation("io.pebbletemplates:pebble:3.2.2") runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) testRuntimeOnly(libs.junit.jupiter.engine) testImplementation(libs.bundles.junit) diff --git a/airbyte-connector-builder-server/src/main/resources/application.yml b/airbyte-connector-builder-server/src/main/resources/application.yml index 8984c63bcc9..52f7b79fe43 100644 --- a/airbyte-connector-builder-server/src/main/resources/application.yml +++ 
b/airbyte-connector-builder-server/src/main/resources/application.yml @@ -128,11 +128,6 @@ jackson: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + io.airbyte.bootloader: DEBUG + # Uncomment to help resolve issues with conditional beans + # io.micronaut.context.condition: DEBUG diff --git a/airbyte-connector-rollout-client/build.gradle.kts b/airbyte-connector-rollout-client/build.gradle.kts index fabb9309d57..3939070986f 100644 --- a/airbyte-connector-rollout-client/build.gradle.kts +++ b/airbyte-connector-rollout-client/build.gradle.kts @@ -18,10 +18,6 @@ repositories { mavenCentral() } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { // TODO: remove the deps not being used compileOnly(libs.lombok) @@ -33,15 +29,12 @@ dependencies { annotationProcessor("info.picocli:picocli-codegen:4.7.4") implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-connector-rollout-shared")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-api:server-api")) implementation(libs.airbyte.protocol) implementation("com.fasterxml.jackson.datatype:jackson-datatype-jsr310:2.12.5") - runtimeOnly(libs.bundles.logback) - } application { diff --git a/airbyte-connector-rollout-client/src/main/resources/application.yml b/airbyte-connector-rollout-client/src/main/resources/application.yml index 645492978f7..c4ec9017422 100644 --- a/airbyte-connector-rollout-client/src/main/resources/application.yml +++ b/airbyte-connector-rollout-client/src/main/resources/application.yml @@ -56,13 +56,7 @@ jackson: mapper: ACCEPT_CASE_INSENSITIVE_ENUMS: true -logger: - levels: - com.zaxxer.hikari: ERROR - 
com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG +logging: + level: + root: INFO + io.airbyte: DEBUG diff --git a/airbyte-connector-rollout-worker/build.gradle.kts b/airbyte-connector-rollout-worker/build.gradle.kts index d3ff720371a..a801c4a1744 100644 --- a/airbyte-connector-rollout-worker/build.gradle.kts +++ b/airbyte-connector-rollout-worker/build.gradle.kts @@ -4,10 +4,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) @@ -22,13 +18,8 @@ dependencies { implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-connector-rollout-shared")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) - - - runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) } airbyte { diff --git a/airbyte-connector-rollout-worker/src/main/resources/application.properties b/airbyte-connector-rollout-worker/src/main/resources/application.properties new file mode 100644 index 00000000000..fe87f74840d --- /dev/null +++ b/airbyte-connector-rollout-worker/src/main/resources/application.properties @@ -0,0 +1 @@ +micronaut.application.name=airbyte-connector-rollout-worker diff --git a/airbyte-connector-rollout-worker/src/main/resources/application.yml b/airbyte-connector-rollout-worker/src/main/resources/application.yml index 2684555bca6..8d4504d4aa1 100644 --- a/airbyte-connector-rollout-worker/src/main/resources/application.yml +++ b/airbyte-connector-rollout-worker/src/main/resources/application.yml @@ -78,13 +78,7 @@ jackson: 
mapper: ACCEPT_CASE_INSENSITIVE_ENUMS: true -logger: - levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG +logging: + level: + root: INFO + io.airbyte: DEBUG diff --git a/airbyte-connector-sidecar/build.gradle.kts b/airbyte-connector-sidecar/build.gradle.kts index 306664f7727..aaac4102113 100644 --- a/airbyte-connector-sidecar/build.gradle.kts +++ b/airbyte-connector-sidecar/build.gradle.kts @@ -29,7 +29,6 @@ configurations.all { exclude(group = "io.micronaut.openapi") exclude(group = "io.micronaut.flyway") exclude(group = "io.micronaut.sql") - exclude(group="org.apache.logging.log4j") } dependencies { @@ -37,6 +36,7 @@ dependencies { ksp(libs.bundles.micronaut.annotation.processor) implementation(platform(libs.micronaut.platform)) + implementation(libs.bundles.log4j) implementation(libs.bundles.micronaut.light) implementation(libs.google.cloud.storage) implementation(libs.java.jwt) @@ -48,8 +48,8 @@ dependencies { implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-converters")) - implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-storage")) + implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-worker")) implementation(project(":oss:airbyte-config:config-models")) @@ -59,7 +59,7 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.kotlin.reflect) - runtimeOnly(libs.bundles.logback) + runtimeOnly(libs.appender.log4j2) runtimeOnly(libs.bundles.bouncycastle) // cryptography package kspTest(platform(libs.micronaut.platform)) diff --git a/airbyte-connector-sidecar/src/main/resources/application.yml 
b/airbyte-connector-sidecar/src/main/resources/application.yml index 2b59b19e9a3..57316af5b77 100644 --- a/airbyte-connector-sidecar/src/main/resources/application.yml +++ b/airbyte-connector-sidecar/src/main/resources/application.yml @@ -67,14 +67,3 @@ airbyte: retries: delay-seconds: ${WORKLOAD_API_RETRY_DELAY_SECONDS:2} max: ${WORKLOAD_API_MAX_RETRIES:5} - -logger: - levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG diff --git a/airbyte-container-orchestrator/build.gradle.kts b/airbyte-container-orchestrator/build.gradle.kts index fdd743ee78b..1fd25b2213d 100644 --- a/airbyte-container-orchestrator/build.gradle.kts +++ b/airbyte-container-orchestrator/build.gradle.kts @@ -23,10 +23,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - val airbyteProtocol by configurations.creating dependencies { @@ -45,16 +41,17 @@ dependencies { implementation(libs.sts) implementation(libs.kubernetes.client) implementation(libs.bundles.datadog) + implementation(libs.bundles.log4j) implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-commons-converters")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-with-dependencies")) 
implementation(project(":oss:airbyte-commons-worker")) @@ -66,7 +63,6 @@ dependencies { implementation(project(":oss:airbyte-worker-models")) runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-container-orchestrator/src/main/resources/application.yml b/airbyte-container-orchestrator/src/main/resources/application.yml index 30ac58faf07..748b73fe5c9 100644 --- a/airbyte-container-orchestrator/src/main/resources/application.yml +++ b/airbyte-container-orchestrator/src/main/resources/application.yml @@ -145,14 +145,3 @@ endpoints: threaddump: enabled: true sensitive: true - -logger: - levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG diff --git a/airbyte-cron/build.gradle.kts b/airbyte-cron/build.gradle.kts index eac5c77830a..2dd262791fe 100644 --- a/airbyte-cron/build.gradle.kts +++ b/airbyte-cron/build.gradle.kts @@ -4,10 +4,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -39,7 +35,6 @@ dependencies { implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-micronaut")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-config:config-persistence")) @@ -52,7 +47,6 @@ dependencies { implementation(project(":oss:airbyte-persistence:job-persistence")) 
runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-cron/src/main/resources/application.yml b/airbyte-cron/src/main/resources/application.yml index e9dc74cdb2b..b89647955b1 100644 --- a/airbyte-cron/src/main/resources/application.yml +++ b/airbyte-cron/src/main/resources/application.yml @@ -178,11 +178,5 @@ temporal: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-data/build.gradle.kts b/airbyte-data/build.gradle.kts index feea3e101cc..a43b269b267 100644 --- a/airbyte-data/build.gradle.kts +++ b/airbyte-data/build.gradle.kts @@ -4,10 +4,6 @@ plugins { `java-test-fixtures` } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -29,7 +25,6 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-protocol")) implementation(project(":oss:airbyte-commons-license")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-config:config-secrets")) implementation(project(":oss:airbyte-db:db-lib")) diff --git a/airbyte-featureflag-server/build.gradle.kts b/airbyte-featureflag-server/build.gradle.kts index 676d0456252..79d8994d5f7 100644 --- a/airbyte-featureflag-server/build.gradle.kts +++ b/airbyte-featureflag-server/build.gradle.kts @@ -3,10 +3,6 @@ plugins { id("io.airbyte.gradle.docker") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - 
dependencies { ksp(libs.bundles.micronaut.annotation.processor) ksp(libs.v3.swagger.annotations) @@ -22,6 +18,7 @@ dependencies { implementation(platform(libs.micronaut.platform)) implementation(libs.bundles.micronaut) implementation(libs.bundles.micronaut.kotlin) + implementation(libs.log4j.impl) implementation(libs.jakarta.ws.rs.api) implementation(libs.micronaut.http) implementation(libs.micronaut.security) @@ -32,10 +29,6 @@ dependencies { implementation(libs.kotlin.logging) implementation(project(":oss:airbyte-commons")) - implementation(project(":oss:airbyte-commons-storage")) - - runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) testImplementation(libs.bundles.micronaut.test) testImplementation(libs.mockk) diff --git a/airbyte-featureflag-server/src/main/resources/application.yml b/airbyte-featureflag-server/src/main/resources/application.yml index 5c8fd2b8ed8..d275e9ab59a 100644 --- a/airbyte-featureflag-server/src/main/resources/application.yml +++ b/airbyte-featureflag-server/src/main/resources/application.yml @@ -55,14 +55,3 @@ jackson: mapper: ACCEPT_CASE_INSENSITIVE_ENUMS: true serialization-inclusion: always - -logger: - levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO - # Uncomment to help resolve issues with conditional beans - # io.micronaut.context.condition: DEBUG diff --git a/airbyte-keycloak-setup/build.gradle.kts b/airbyte-keycloak-setup/build.gradle.kts index e6588cef173..4c511dcc4f0 100644 --- a/airbyte-keycloak-setup/build.gradle.kts +++ b/airbyte-keycloak-setup/build.gradle.kts @@ -4,10 +4,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -22,13 +18,10 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) 
implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-data")) implementation(project(":oss:airbyte-db:db-lib")) implementation(project(":oss:airbyte-db:jooq")) - runtimeOnly(libs.bundles.logback) - testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-keycloak-setup/src/main/resources/application.yml b/airbyte-keycloak-setup/src/main/resources/application.yml index caa7017fb25..10ab9fcbdf6 100644 --- a/airbyte-keycloak-setup/src/main/resources/application.yml +++ b/airbyte-keycloak-setup/src/main/resources/application.yml @@ -44,14 +44,3 @@ datasources: driverClassName: org.postgresql.Driver username: ${DATABASE_USER} password: ${DATABASE_PASSWORD} - -logger: - levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG diff --git a/airbyte-metrics/reporter/build.gradle.kts b/airbyte-metrics/reporter/build.gradle.kts index 21518e3bee4..c46fa09b74b 100644 --- a/airbyte-metrics/reporter/build.gradle.kts +++ b/airbyte-metrics/reporter/build.gradle.kts @@ -8,10 +8,6 @@ configurations { create("jdbc") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { annotationProcessor(platform(libs.micronaut.platform)) annotationProcessor(libs.bundles.micronaut.annotation.processor) @@ -19,7 +15,6 @@ dependencies { implementation(platform(libs.micronaut.platform)) implementation(libs.bundles.micronaut) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-db:jooq")) 
implementation(project(":oss:airbyte-db:db-lib")) @@ -27,7 +22,6 @@ dependencies { implementation(libs.jooq) runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) testAnnotationProcessor(platform(libs.micronaut.platform)) testAnnotationProcessor(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-metrics/reporter/src/main/resources/application.yml b/airbyte-metrics/reporter/src/main/resources/application.yml index 8f24f259bc2..27ed71a4f8c 100644 --- a/airbyte-metrics/reporter/src/main/resources/application.yml +++ b/airbyte-metrics/reporter/src/main/resources/application.yml @@ -35,11 +35,6 @@ endpoints: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + io.airbyte.bootloader: DEBUG +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-notification/build.gradle.kts b/airbyte-notification/build.gradle.kts index 3328531a07e..f04449c1162 100644 --- a/airbyte-notification/build.gradle.kts +++ b/airbyte-notification/build.gradle.kts @@ -3,10 +3,6 @@ plugins { id("io.airbyte.gradle.publish") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -27,6 +23,8 @@ dependencies { implementation(libs.commons.io) implementation(platform(libs.fasterxml)) implementation(libs.bundles.jackson) + // TODO remove this, it"s used for String.isEmpty check) + implementation(libs.bundles.log4j) testImplementation(libs.mockk) testRuntimeOnly(libs.junit.jupiter.engine) diff --git a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java 
b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java index 91456a1d525..6c3755424b7 100644 --- a/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java +++ b/airbyte-notification/src/main/java/io/airbyte/notification/SlackNotificationClient.java @@ -21,7 +21,6 @@ import io.airbyte.notification.slack.Field; import io.airbyte.notification.slack.Notification; import io.airbyte.notification.slack.Section; -import io.micronaut.core.util.StringUtils; import java.io.IOException; import java.net.URI; import java.net.http.HttpClient; @@ -30,6 +29,7 @@ import java.util.Comparator; import java.util.List; import java.util.Optional; +import org.apache.logging.log4j.util.Strings; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -222,7 +222,7 @@ public boolean notifySchemaPropagated(final SchemaUpdateNotification notificatio notification.getWorkspace().getUrl(), notification.getSourceInfo().getUrl()); final String webhookUrl = config.getWebhook(); - if (!StringUtils.isEmpty(webhookUrl)) { + if (!Strings.isEmpty(webhookUrl)) { return notifyJson(slackNotification.toJsonNode()); } return false; @@ -334,7 +334,7 @@ private boolean notify(final String message) throws IOException, InterruptedExce } private boolean notifyJson(final JsonNode node) throws IOException, InterruptedException { - if (StringUtils.isEmpty(config.getWebhook())) { + if (Strings.isEmpty(config.getWebhook())) { return false; } final ObjectMapper mapper = new ObjectMapper(); @@ -367,7 +367,7 @@ public String getNotificationClientType() { */ public boolean notifyTest(final String message) throws IOException, InterruptedException { final String webhookUrl = config.getWebhook(); - if (!StringUtils.isEmpty(webhookUrl)) { + if (!Strings.isEmpty(webhookUrl)) { return notify(message); } return false; diff --git 
a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java index 0de0532344e..7e599673a59 100644 --- a/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java +++ b/airbyte-persistence/job-persistence/src/main/java/io/airbyte/persistence/job/tracker/TrackingMetadata.java @@ -24,7 +24,6 @@ import io.airbyte.config.StandardSyncSummary; import io.airbyte.config.SyncStats; import io.airbyte.config.helpers.ScheduleHelpers; -import io.micronaut.core.util.StringUtils; import java.util.Collection; import java.util.Comparator; import java.util.LinkedHashMap; @@ -32,6 +31,7 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.TimeUnit; +import org.apache.logging.log4j.util.Strings; /** * Helpers to fetch stats / metadata about Airbyte domain models and turn them into flat maps that @@ -126,7 +126,7 @@ private static Map generateActorDefinitionVersionMetadata(final final Builder metadata = ImmutableMap.builder(); metadata.put(metaPrefix + "docker_repository", sourceVersion.getDockerRepository()); final String imageTag = sourceVersion.getDockerImageTag(); - if (!StringUtils.isEmpty(imageTag)) { + if (!Strings.isEmpty(imageTag)) { metadata.put(metaPrefix + "version", imageTag); } return metadata.build(); @@ -149,7 +149,7 @@ public static Map generateJobAttemptMetadata(final Job job) { if (attempts == null || attempts.isEmpty()) { return metadata.build(); } - final Attempt lastAttempt = attempts.getLast(); + final Attempt lastAttempt = attempts.get(attempts.size() - 1); if (lastAttempt.getOutput() == null || lastAttempt.getOutput().isEmpty()) { return metadata.build(); } @@ -216,7 +216,7 @@ public static Map generateJobAttemptMetadata(final Job job) { final List failureReasons = failureReasonsList(attempts); if (!failureReasons.isEmpty()) { 
metadata.put("failure_reasons", failureReasonsListAsJson(failureReasons).toString()); - metadata.put("main_failure_reason", failureReasonAsJson(failureReasons.getFirst()).toString()); + metadata.put("main_failure_reason", failureReasonAsJson(failureReasons.get(0)).toString()); } return metadata.build(); } diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 54061c67e1e..0a2f75dfab6 100644 --- a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -5,10 +5,6 @@ plugins { id("io.airbyte.gradle.kube-reload") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { compileOnly(libs.lombok) annotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut @@ -43,6 +39,7 @@ dependencies { implementation(libs.swagger.annotations) implementation(libs.google.cloud.storage) implementation(libs.cron.utils) + implementation(libs.log4j.slf4j2.impl) // Because cron-utils uses slf4j 2.0+ implementation(libs.jakarta.ws.rs.api) implementation(libs.jakarta.validation.api) implementation(libs.kubernetes.client) @@ -55,9 +52,9 @@ dependencies { implementation(project(":oss:airbyte-commons-auth")) implementation(project(":oss:airbyte-commons-converters")) implementation(project(":oss:airbyte-commons-license")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-server")) @@ -78,9 +75,7 @@ dependencies { implementation(libs.airbyte.protocol) implementation(project(":oss:airbyte-persistence:job-persistence")) - runtimeOnly(libs.snakeyaml) runtimeOnly(libs.javax.databind) - runtimeOnly(libs.bundles.logback) // Required for local database secret 
hydration) runtimeOnly(libs.hikaricp) diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index acf16a87251..ded8be3a392 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -450,12 +450,6 @@ jooq: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO # Uncomment to help resolve issues with conditional beans # io.micronaut.context.condition: DEBUG # Uncomment to help resolve issues with security beans diff --git a/airbyte-workers/build.gradle.kts b/airbyte-workers/build.gradle.kts index 63c65c27c99..49331274c13 100644 --- a/airbyte-workers/build.gradle.kts +++ b/airbyte-workers/build.gradle.kts @@ -30,7 +30,6 @@ val jdbc by configurations.creating configurations.all { // The quartz-scheduler brings in an outdated version(of hikari, we do not want to inherit this version.) 
exclude(group = "com.zaxxer", module = "HikariCP-java7") - exclude(group="org.apache.logging.log4j") } dependencies { @@ -74,10 +73,10 @@ dependencies { implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-converters")) + implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-micronaut")) implementation(project(":oss:airbyte-commons-micronaut-security")) implementation(project(":oss:airbyte-commons-protocol")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-worker")) @@ -99,7 +98,6 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.javax.databind) - runtimeOnly(libs.bundles.logback) testCompileOnly(libs.lombok) testAnnotationProcessor(libs.lombok) // Lombok must be added BEFORE Micronaut diff --git a/airbyte-workers/src/main/resources/application.yml b/airbyte-workers/src/main/resources/application.yml index 3661cb71ac6..4b3d8300e8e 100644 --- a/airbyte-workers/src/main/resources/application.yml +++ b/airbyte-workers/src/main/resources/application.yml @@ -294,11 +294,6 @@ temporal: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + io.airbyte.bootloader: DEBUG +# Uncomment to help resolve issues with conditional beans +# io.micronaut.context.condition: DEBUG diff --git a/airbyte-workload-api-server/build.gradle.kts b/airbyte-workload-api-server/build.gradle.kts index 14c183f1501..4ca59ef7597 100644 --- a/airbyte-workload-api-server/build.gradle.kts +++ b/airbyte-workload-api-server/build.gradle.kts @@ -4,10 +4,6 @@ plugins { 
id("io.airbyte.gradle.docker") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { ksp(libs.v3.swagger.annotations) ksp(platform(libs.micronaut.platform)) @@ -29,6 +25,7 @@ dependencies { implementation(libs.jakarta.transaction.api) implementation(libs.bundles.temporal) implementation(libs.bundles.temporal.telemetry) + implementation(libs.log4j.impl) implementation(libs.micronaut.jaxrs.server) implementation(libs.jakarta.ws.rs.api) implementation(libs.micronaut.security) @@ -43,7 +40,6 @@ dependencies { implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-commons")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-config:config-models")) implementation(project(":oss:airbyte-featureflag")) @@ -54,7 +50,6 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.javax.databind) - runtimeOnly(libs.bundles.logback) kspTest(platform(libs.micronaut.platform)) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-workload-api-server/src/main/resources/application.yml b/airbyte-workload-api-server/src/main/resources/application.yml index 78f4bd85d4e..b1d05b074a7 100644 --- a/airbyte-workload-api-server/src/main/resources/application.yml +++ b/airbyte-workload-api-server/src/main/resources/application.yml @@ -132,20 +132,14 @@ temporal: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG + # io.micronaut.data.query: TRACE + # Uncomment to help resolve issues with conditional beans + # io.micronaut.context.condition: DEBUG # Uncomment to help resolve issues with security beans -# io.micronaut.security: DEBUG +# io.micronaut.security: DEBUG # Uncomment to 
help resolve issues with micronaut data -# io.micronaut.data.query: TRACE -# com.zaxxer.hikari.HikariConfig: DEBUG -# com.zaxxer.hikari: TRACE +# com.zaxxer.hikari.HikariConfig: DEBUG +# com.zaxxer.hikari: TRACE jackson: mapper: diff --git a/airbyte-workload-init-container/build.gradle.kts b/airbyte-workload-init-container/build.gradle.kts index 28bf715d863..013d67cfa42 100644 --- a/airbyte-workload-init-container/build.gradle.kts +++ b/airbyte-workload-init-container/build.gradle.kts @@ -8,7 +8,6 @@ configurations.all { exclude(group = "io.micronaut", module = "micronaut-http-server-netty") exclude(group = "io.micronaut.openapi") exclude(group = "io.micronaut.flyway") - exclude(group="org.apache.logging.log4j") } dependencies { @@ -25,7 +24,6 @@ dependencies { implementation(project(":oss:airbyte-api:server-api")) implementation(project(":oss:airbyte-api:workload-api")) implementation(project(":oss:airbyte-config:config-secrets")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-worker")) implementation(project(":oss:airbyte-featureflag")) implementation(project(":oss:airbyte-mappers")) @@ -33,9 +31,6 @@ dependencies { implementation(project(":oss:airbyte-worker-models")) implementation(project(":oss:airbyte-commons-protocol")) - runtimeOnly(libs.snakeyaml) - runtimeOnly(libs.bundles.logback) - kspTest(platform(libs.micronaut.platform)) kspTest(libs.bundles.micronaut.annotation.processor) kspTest(libs.bundles.micronaut.test.annotation.processor) diff --git a/airbyte-workload-init-container/src/main/resources/application.yml b/airbyte-workload-init-container/src/main/resources/application.yml index f19c45a83e3..e2cddf984eb 100644 --- a/airbyte-workload-init-container/src/main/resources/application.yml +++ b/airbyte-workload-init-container/src/main/resources/application.yml @@ -67,14 +67,3 @@ airbyte: retries: delay-seconds: ${WORKLOAD_API_RETRY_DELAY_SECONDS:2} max: ${WORKLOAD_API_MAX_RETRIES:5} - -logger: - 
levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO -# Uncomment to help resolve issues with conditional beans -# io.micronaut.context.condition: DEBUG diff --git a/airbyte-workload-launcher/build.gradle.kts b/airbyte-workload-launcher/build.gradle.kts index 92742d421c1..df337de64b2 100644 --- a/airbyte-workload-launcher/build.gradle.kts +++ b/airbyte-workload-launcher/build.gradle.kts @@ -4,10 +4,6 @@ plugins { id("io.airbyte.gradle.docker") } -configurations.all { - exclude(group="org.apache.logging.log4j") -} - dependencies { ksp(platform(libs.micronaut.platform)) ksp(libs.bundles.micronaut.annotation.processor) @@ -15,6 +11,7 @@ dependencies { implementation(libs.bundles.datadog) implementation(libs.bundles.kubernetes.client) + implementation(libs.bundles.log4j) implementation(libs.bundles.micronaut) implementation(libs.bundles.temporal) implementation(libs.bundles.temporal.telemetry) @@ -38,7 +35,6 @@ dependencies { implementation(project(":oss:airbyte-commons")) implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-micronaut")) - implementation(project(":oss:airbyte-commons-storage")) implementation(project(":oss:airbyte-commons-temporal")) implementation(project(":oss:airbyte-commons-temporal-core")) implementation(project(":oss:airbyte-commons-with-dependencies")) @@ -53,8 +49,8 @@ dependencies { runtimeOnly(libs.snakeyaml) runtimeOnly(libs.kotlin.reflect) + runtimeOnly(libs.appender.log4j2) runtimeOnly(libs.bundles.bouncycastle) - runtimeOnly(libs.bundles.logback) // Required for secret hydration in OSS runtimeOnly(libs.hikaricp) diff --git a/airbyte-workload-launcher/src/main/resources/application.yml b/airbyte-workload-launcher/src/main/resources/application.yml index 5c94d5f623d..e3aadac1dca 100644 --- a/airbyte-workload-launcher/src/main/resources/application.yml +++ 
b/airbyte-workload-launcher/src/main/resources/application.yml @@ -298,12 +298,6 @@ temporal: logger: levels: - com.zaxxer.hikari: ERROR - com.zaxxer.hikari.pool: ERROR - io.grpc: INFO - io.fabric8.kubernetes.client: INFO - io.netty: INFO - io.temporal: INFO # Uncomment to help resolve issues with conditional beans # io.micronaut.context.condition: DEBUG diff --git a/deps.toml b/deps.toml index cc8c65bf91c..141f7190a6f 100644 --- a/deps.toml +++ b/deps.toml @@ -24,7 +24,6 @@ kotest = "5.9.1" kotlin-logging = "5.1.0" kubernetes-client = "6.12.1" log4j = "2.23.1" -logback = "1.5.8" lombok = "1.18.34" micronaut = "4.6.2" micronaut-cache = "5.0.1" @@ -116,7 +115,6 @@ jakarta-persistence-api = { module = "jakarta.persistence:jakarta.persistence-ap jakarta-transaction-api = { module = "jakarta.transaction:jakarta.transaction-api", version = "2.0.1" } jakarta-validation-api = { module = "jakarta.validation:jakarta.validation-api", version = "3.0.2" } jakarta-ws-rs-api = { module = "jakarta.ws.rs:jakarta.ws.rs-api", version.ref = "jax-rs" } -janino = { module = "org.codehaus.janino:janino", version = "3.1.12" } java-dogstatsd-client = { module = "com.datadoghq:java-dogstatsd-client", version = "4.1.0" } java-jwt = { module = "com.auth0:java-jwt", version = "3.19.2" } javax-databind = { module = "javax.xml.bind:jaxb-api", version = "2.4.0-b180830.0359" } @@ -157,8 +155,6 @@ log4j-impl = { module = "org.apache.logging.log4j:log4j-slf4j-impl", version.ref log4j-over-slf4j = { module = "org.slf4j:log4j-over-slf4j", version.ref = "slf4j" } log4j-slf4j2-impl = { module = "org.apache.logging.log4j:log4j-slf4j2-impl", version.ref = "log4j" } log4j-web = { module = "org.apache.logging.log4j:log4j-web", version.ref = "log4j" } -logback-classic = { module = "ch.qos.logback:logback-classic", version.ref = "logback" } -logback-core = { module = "ch.qos.logback:logback-core", version.ref = "logback" } lombok = { module = "org.projectlombok:lombok", version.ref = "lombok" } 
micrometer-statsd = { module = "io.micrometer:micrometer-registry-statsd", version = "1.9.3" } moshi-kotlin = { module = "com.squareup.moshi:moshi-kotlin", version.ref = "moshi" } @@ -263,7 +259,6 @@ keycloak-client = ["keycloak-admin-client", "keycloak-client-registration-api"] kotest = ["kotest-assertions"] kubernetes-client = ["kubernetes-client-api", "kubernetes-client"] log4j = ["log4j-api", "log4j-core", "log4j-impl", "log4j-slf4j2-impl", "log4j-web", "appender-log4j2"] -logback = ["logback-classic", "logback-core", "janino"] micronaut = ["jakarta-annotation-api", "jakarta-transaction-api", "micronaut-http-server-netty", "micronaut-http-client", "micronaut-inject", "micronaut-validation", "micronaut-runtime", "micronaut-management", "micronaut-flyway", "micronaut-jdbc-hikari", "micronaut-jooq", "micronaut-jackson-databind"] micronaut-light = ["jakarta-annotation-api", "micronaut-inject-java", "micronaut-runtime"] micronaut-annotation = ["jakarta-annotation-api", "micronaut-inject-java", "micronaut-inject-kotlin"] From 83db26a26a1b8f0681ece0b8ae48b47137213b85 Mon Sep 17 00:00:00 2001 From: Bryce Groff Date: Mon, 30 Sep 2024 13:39:12 -0700 Subject: [PATCH 22/36] refactor: remove the CatalogService from the ConfigRepository (#14171) --- .../config/DatabaseBeanFactory.java | 1 - .../io/airbyte/bootloader/BootloaderTest.java | 4 - .../server/handlers/ConnectionsHandler.java | 22 ++-- .../server/handlers/SchedulerHandler.java | 26 ++-- .../server/handlers/SourceHandler.java | 8 +- .../WebBackendConnectionsHandler.java | 12 +- .../handlers/ConnectionsHandlerTest.java | 44 +++++-- .../server/handlers/SchedulerHandlerTest.java | 91 +++++++------ .../server/handlers/SourceHandlerTest.java | 8 +- .../WebBackendConnectionsHandlerTest.java | 44 ++++--- .../config/persistence/ConfigRepository.java | 122 ------------------ ...finitionBreakingChangePersistenceTest.java | 2 - .../ActorDefinitionPersistenceTest.java | 2 - ...ActorDefinitionVersionPersistenceTest.java | 2 - 
.../persistence/ConfigInjectionTest.java | 2 - .../ConfigRepositoryE2EReadWriteTest.java | 46 +++---- .../ConnectorMetadataPersistenceTest.java | 2 - .../PermissionPersistenceTest.java | 2 - .../StandardSyncPersistenceTest.java | 2 - .../persistence/StatePersistenceTest.java | 3 - .../SyncOperationPersistenceTest.java | 2 - .../persistence/UserPersistenceTest.java | 2 - .../persistence/WorkspaceFilterTest.java | 2 - .../persistence/WorkspacePersistenceTest.java | 2 - .../cron/config/DatabaseBeanFactory.java | 1 - .../server/config/DatabaseBeanFactory.java | 1 - .../io/airbyte/server/apis/SourceApiTest.java | 3 +- 27 files changed, 187 insertions(+), 271 deletions(-) diff --git a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java index 1fe8d79b83e..aae0f0115a0 100644 --- a/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java +++ b/airbyte-bootloader/src/main/java/io/airbyte/bootloader/config/DatabaseBeanFactory.java @@ -132,7 +132,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin final WorkspaceService workspaceService) { return new ConfigRepository( actorDefinitionService, - catalogService, connectionService, connectorBuilderService, destinationService, diff --git a/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java b/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java index 35be98efe01..4a2ab878e6b 100644 --- a/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java +++ b/airbyte-bootloader/src/test-integration/java/io/airbyte/bootloader/BootloaderTest.java @@ -38,7 +38,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; 
-import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; @@ -178,7 +177,6 @@ void testBootloaderAppBlankDb() throws Exception { secretPersistenceConfigService); val configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(configDatabase), - new CatalogServiceJooqImpl(configDatabase), connectionService, connectorBuilderService, destinationService, @@ -287,7 +285,6 @@ void testRequiredVersionUpgradePredicate() throws Exception { mock(SecretPersistenceConfigService.class)); val configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(configDatabase), - new CatalogServiceJooqImpl(configDatabase), connectionService, connectorBuilderService, destinationService, @@ -402,7 +399,6 @@ void testPostLoadExecutionExecutes() throws Exception { scopedConfigurationService); val configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(configDatabase), - new CatalogServiceJooqImpl(configDatabase), connectionService, connectorBuilderService, new DestinationServiceJooqImpl(configDatabase, diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java index a4c0d3d660a..6175360f152 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/ConnectionsHandler.java @@ -117,6 +117,7 @@ import io.airbyte.config.persistence.domain.Generation; import io.airbyte.config.persistence.helper.CatalogGenerationSetter; import io.airbyte.data.repositories.entities.ConnectionTimelineEvent; +import io.airbyte.data.services.CatalogService; import 
io.airbyte.data.services.ConnectionTimelineEventService; import io.airbyte.data.services.StreamStatusesService; import io.airbyte.data.services.shared.ConnectionAutoDisabledReason; @@ -181,6 +182,7 @@ public class ConnectionsHandler { private final JobPersistence jobPersistence; private final ConfigRepository configRepository; + private final CatalogService catalogService; private final Supplier uuidGenerator; private final WorkspaceHelper workspaceHelper; private final TrackingClient trackingClient; @@ -207,6 +209,7 @@ public class ConnectionsHandler { public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, final JobPersistence jobPersistence, final ConfigRepository configRepository, + final CatalogService catalogService, @Named("uuidGenerator") final Supplier uuidGenerator, final WorkspaceHelper workspaceHelper, final TrackingClient trackingClient, @@ -228,6 +231,7 @@ public ConnectionsHandler(final StreamRefreshesHandler streamRefreshesHandler, final StatePersistence statePersistence) { this.jobPersistence = jobPersistence; this.configRepository = configRepository; + this.catalogService = catalogService; this.uuidGenerator = uuidGenerator; this.workspaceHelper = workspaceHelper; this.trackingClient = trackingClient; @@ -859,7 +863,7 @@ public CatalogDiff getDiff(final AirbyteCatalog oldCatalog, } public CatalogDiff getDiff(final ConnectionRead connectionRead, final AirbyteCatalog discoveredCatalog) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final var catalogWithSelectedFieldsAnnotated = connectionRead.getSyncCatalog(); final var configuredCatalog = CatalogConverter.toConfiguredInternal(catalogWithSelectedFieldsAnnotated); @@ -927,12 +931,12 @@ private Map catalogToPerStreamConf } public Optional getConnectionAirbyteCatalog(final UUID connectionId) - throws JsonValidationException, 
ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final StandardSync connection = configRepository.getStandardSync(connectionId); if (connection.getSourceCatalogId() == null) { return Optional.empty(); } - final ActorCatalog catalog = configRepository.getActorCatalogById(connection.getSourceCatalogId()); + final ActorCatalog catalog = catalogService.getActorCatalogById(connection.getSourceCatalogId()); final StandardSourceDefinition sourceDefinition = configRepository.getSourceDefinitionFromSource(connection.getSourceId()); final SourceConnection sourceConnection = configRepository.getSourceConnection(connection.getSourceId()); final ActorDefinitionVersion sourceVersion = @@ -1369,7 +1373,7 @@ public List getConnectionStreamHistory( } public ConnectionAutoPropagateResult applySchemaChange(final ConnectionAutoPropagateSchemaChange request) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { return applySchemaChange(request.getConnectionId(), request.getWorkspaceId(), request.getCatalogId(), request.getCatalog(), false); } @@ -1379,7 +1383,7 @@ public ConnectionAutoPropagateResult applySchemaChange( final UUID catalogId, final AirbyteCatalog catalog, final Boolean autoApply) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { LOGGER.info("Applying schema change for connection '{}' only", connectionId); final ConnectionRead connection = buildConnectionRead(connectionId); @@ -1525,7 +1529,7 @@ public List getConnectionLastJobPerStream(fi * diff, conditionally disables and auto-propagates schema changes. 
*/ public PostprocessDiscoveredCatalogResult postprocessDiscoveredCatalog(final UUID connectionId, final UUID discoveredCatalogId) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final var read = diffCatalogAndConditionallyDisable(connectionId, discoveredCatalogId); final var autoPropResult = @@ -1578,7 +1582,7 @@ public ConnectionRead updateSchemaChangesAndAutoDisableConnectionIfNeeded(final * breaking changes then disable the connection if necessary. */ public SourceDiscoverSchemaRead diffCatalogAndConditionallyDisable(final UUID connectionId, final UUID discoveredCatalogId) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final var connectionRead = getConnection(connectionId); final var source = configRepository.getSourceConnection(connectionRead.getSourceId()); final var sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); @@ -1598,9 +1602,9 @@ public SourceDiscoverSchemaRead diffCatalogAndConditionallyDisable(final UUID co } private AirbyteCatalog retrieveDiscoveredCatalog(final UUID catalogId, final ActorDefinitionVersion sourceVersion) - throws ConfigNotFoundException, IOException { + throws IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - final ActorCatalog catalog = configRepository.getActorCatalogById(catalogId); + final ActorCatalog catalog = catalogService.getActorCatalogById(catalogId); final io.airbyte.protocol.models.AirbyteCatalog persistenceCatalog = Jsons.object( catalog.getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java index 7674f57febb..28acbf8a5ef 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SchedulerHandler.java @@ -84,6 +84,7 @@ import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.shared.ConnectionAutoUpdatedReason; @@ -135,6 +136,7 @@ public class SchedulerHandler { private final ConnectionsHandler connectionsHandler; private final ConfigRepository configRepository; + private final CatalogService catalogService; private final SecretsRepositoryWriter secretsRepositoryWriter; private final SynchronousSchedulerClient synchronousSchedulerClient; private final ConfigurationUpdate configurationUpdate; @@ -159,6 +161,7 @@ public class SchedulerHandler { @VisibleForTesting public SchedulerHandler(final ConfigRepository configRepository, + final CatalogService catalogService, final SecretsRepositoryWriter secretsRepositoryWriter, final SynchronousSchedulerClient synchronousSchedulerClient, final ConfigurationUpdate configurationUpdate, @@ -183,6 +186,7 @@ public SchedulerHandler(final ConfigRepository configRepository, final NotificationHelper notificationHelper, final ConnectionTimelineEventHelper connectionTimelineEventHelper) { this.configRepository = configRepository; + this.catalogService = catalogService; this.secretsRepositoryWriter = secretsRepositoryWriter; this.synchronousSchedulerClient = synchronousSchedulerClient; this.configurationUpdate = configurationUpdate; @@ -342,7 +346,7 @@ public CheckConnectionRead 
checkDestinationConnectionFromDestinationIdForUpdate( } public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final SourceDiscoverSchemaRequestBody req) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = configRepository.getSourceConnection(req.getSourceId()); if (featureFlagClient.boolVariation(DiscoverPostprocessInTemporal.INSTANCE, new Workspace(source.getWorkspaceId()))) { @@ -357,7 +361,7 @@ public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceId(final Source */ public SourceDiscoverSchemaRead discoverAndGloballyDisable(final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody, final SourceConnection source) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID sourceId = discoverSchemaRequestBody.getSourceId(); final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); final ActorDefinitionVersion sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDef, source.getWorkspaceId(), sourceId); @@ -372,7 +376,7 @@ public SourceDiscoverSchemaRead discoverAndGloballyDisable(final SourceDiscoverS Charsets.UTF_8)).toString(); final String connectorVersion = sourceVersion.getDockerImageTag(); final Optional currentCatalog = - configRepository.getActorCatalog(discoverSchemaRequestBody.getSourceId(), connectorVersion, configHash); + catalogService.getActorCatalog(discoverSchemaRequestBody.getSourceId(), connectorVersion, configHash); final boolean bustActorCatalogCache = discoverSchemaRequestBody.getDisableCache() != null && discoverSchemaRequestBody.getDisableCache(); if (currentCatalog.isEmpty() || bustActorCatalogCache) { final 
SynchronousResponse persistedCatalogId = @@ -411,7 +415,7 @@ public SourceDiscoverSchemaRead discoverAndGloballyDisable(final SourceDiscoverS * Runs discover schema and does not disable other connections. */ public SourceDiscoverSchemaRead discover(final SourceDiscoverSchemaRequestBody req, final SourceConnection source) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID sourceId = req.getSourceId(); final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(source.getSourceDefinitionId()); final ActorDefinitionVersion sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDef, source.getWorkspaceId(), sourceId); @@ -428,7 +432,7 @@ public SourceDiscoverSchemaRead discover(final SourceDiscoverSchemaRequestBody r final String connectorVersion = sourceVersion.getDockerImageTag(); final Optional existingCatalog = - configRepository.getActorCatalog(req.getSourceId(), connectorVersion, configHash); + catalogService.getActorCatalog(req.getSourceId(), connectorVersion, configHash); // No catalog exists, run discover. 
if (existingCatalog.isEmpty()) { @@ -456,7 +460,7 @@ private SourceDiscoverSchemaRead runDiscoverJobDiffAndConditionallyDisable(final final ActorDefinitionVersion sourceVersion, final io.airbyte.api.model.generated.WorkloadPriority priority, final UUID connectionId) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final boolean isCustomConnector = sourceDef.getCustom(); // ResourceRequirements are read from actor definition and can be null; but if it's not null it will // have higher priority and overwrite @@ -483,7 +487,7 @@ private SourceDiscoverSchemaRead runDiscoverJobDiffAndConditionallyDisable(final } public void applySchemaChangeForSource(final SourceAutoPropagateChange sourceAutoPropagateChange) - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { LOGGER.info("Applying schema changes for source '{}' in workspace '{}'", sourceAutoPropagateChange.getSourceId(), sourceAutoPropagateChange.getWorkspaceId()); if (sourceAutoPropagateChange.getSourceId() == null) { @@ -557,7 +561,7 @@ public void applySchemaChangeForSource(final SourceAutoPropagateChange sourceAut } public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceCreate(final SourceCoreConfig sourceCreate) - throws ConfigNotFoundException, IOException, JsonValidationException { + throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final StandardSourceDefinition sourceDef = configRepository.getStandardSourceDefinition(sourceCreate.getSourceDefinitionId()); final ActorDefinitionVersion sourceVersion = actorDefinitionVersionHelper.getSourceVersion(sourceDef, sourceCreate.getWorkspaceId(), sourceCreate.getSourceId()); @@ -588,12 +592,12 @@ 
public SourceDiscoverSchemaRead discoverSchemaForSourceFromSourceCreate(final So } private SourceDiscoverSchemaRead retrieveDiscoveredSchema(final SynchronousResponse response, final ActorDefinitionVersion sourceVersion) - throws ConfigNotFoundException, IOException { + throws IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceDiscoverSchemaRead sourceDiscoverSchemaRead = new SourceDiscoverSchemaRead() .jobInfo(jobConverter.getSynchronousJobRead(response)); if (response.isSuccess()) { - final ActorCatalog catalog = configRepository.getActorCatalogById(response.getOutput()); + final ActorCatalog catalog = catalogService.getActorCatalogById(response.getOutput()); final AirbyteCatalog persistenceCatalog = Jsons.object(catalog.getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); sourceDiscoverSchemaRead.catalog(CatalogConverter.toApi(persistenceCatalog, sourceVersion)); @@ -691,7 +695,7 @@ public JobInfoRead cancelJob(final JobIdRequestBody jobIdRequestBody) throws IOE // containsBreakingChange parameter, and connectionStatus parameter. 
private void generateCatalogDiffsAndDisableConnectionsIfNeeded(final SourceDiscoverSchemaRead discoveredSchema, final SourceDiscoverSchemaRequestBody discoverSchemaRequestBody) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final ConnectionReadList connectionsForSource = connectionsHandler.listConnectionsForSource(discoverSchemaRequestBody.getSourceId(), false); for (final ConnectionRead connectionRead : connectionsForSource.getConnections()) { final Optional catalogUsedToMakeConfiguredCatalog = connectionsHandler diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java index af9a868f075..8740a25b144 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java +++ b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/SourceHandler.java @@ -49,6 +49,7 @@ import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.config.secrets.persistence.RuntimeSecretPersistence; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -77,6 +78,7 @@ public class SourceHandler { private final Supplier uuidGenerator; private final ConfigRepository configRepository; + private final CatalogService catalogService; private final SecretsRepositoryReader secretsRepositoryReader; private final JsonSchemaValidator validator; private final ConnectionsHandler connectionsHandler; @@ -93,6 +95,7 @@ public class SourceHandler { @VisibleForTesting public SourceHandler(final ConfigRepository configRepository, + final 
CatalogService catalogService, final SecretsRepositoryReader secretsRepositoryReader, final JsonSchemaValidator integrationSchemaValidation, final ConnectionsHandler connectionsHandler, @@ -108,6 +111,7 @@ public SourceHandler(final ConfigRepository configRepository, final ActorDefinitionHandlerHelper actorDefinitionHandlerHelper, final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater) { this.configRepository = configRepository; + this.catalogService = catalogService; this.secretsRepositoryReader = secretsRepositoryReader; validator = integrationSchemaValidation; this.connectionsHandler = connectionsHandler; @@ -253,7 +257,7 @@ public SourceRead getSource(final SourceIdRequestBody sourceIdRequestBody) public ActorCatalogWithUpdatedAt getMostRecentSourceActorCatalogWithUpdatedAt(final SourceIdRequestBody sourceIdRequestBody) throws IOException { final Optional actorCatalog = - configRepository.getMostRecentSourceActorCatalog(sourceIdRequestBody.getSourceId()); + catalogService.getMostRecentSourceActorCatalog(sourceIdRequestBody.getSourceId()); if (actorCatalog.isEmpty()) { return new ActorCatalogWithUpdatedAt(); } else { @@ -394,7 +398,7 @@ public DiscoverCatalogResult writeDiscoverCatalogResult(final SourceDiscoverSche } private UUID writeActorCatalog(final AirbyteCatalog persistenceCatalog, final SourceDiscoverSchemaWriteRequestBody request) throws IOException { - return configRepository.writeActorCatalogFetchEvent( + return catalogService.writeActorCatalogFetchEvent( persistenceCatalog, request.getSourceId(), request.getConnectorVersion(), diff --git a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java index f7babdbd851..0c82ff3aa2c 100644 --- a/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java +++ 
b/airbyte-commons-server/src/main/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandler.java @@ -69,6 +69,7 @@ import io.airbyte.config.persistence.ConfigNotFoundException; import io.airbyte.config.persistence.ConfigRepository; import io.airbyte.config.persistence.ConfigRepository.StandardSyncQuery; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.ConnectionService; import io.airbyte.featureflag.FeatureFlagClient; import io.airbyte.metrics.lib.ApmTraceUtils; @@ -109,6 +110,7 @@ public class WebBackendConnectionsHandler { // todo (cgardens) - this handler should NOT have access to the db. only access via handler. @Deprecated private final ConfigRepository configRepositoryDoNotUse; + private final CatalogService catalogService; private final ConnectionService connectionService; private final ActorDefinitionVersionHelper actorDefinitionVersionHelper; private final FeatureFlagClient featureFlagClient; @@ -124,6 +126,7 @@ public WebBackendConnectionsHandler(final ActorDefinitionVersionHandler actorDef final OperationsHandler operationsHandler, final EventRunner eventRunner, final ConfigRepository configRepositoryDoNotUse, + CatalogService catalogService, final ConnectionService connectionService, final ActorDefinitionVersionHelper actorDefinitionVersionHelper, final FieldGenerator fieldGenerator, @@ -138,6 +141,7 @@ public WebBackendConnectionsHandler(final ActorDefinitionVersionHandler actorDef this.operationsHandler = operationsHandler; this.eventRunner = eventRunner; this.configRepositoryDoNotUse = configRepositoryDoNotUse; + this.catalogService = catalogService; this.connectionService = connectionService; this.actorDefinitionVersionHelper = actorDefinitionVersionHelper; this.fieldGenerator = fieldGenerator; @@ -184,7 +188,7 @@ public WebBackendConnectionReadList webBackendListConnectionsForWorkspace(final // right status filtering for this. 
final Map runningJobByConnectionId = getRunningJobByConnectionId(connectionIds); final Map newestFetchEventsByActorId = - configRepositoryDoNotUse.getMostRecentActorCatalogFetchEventForSources(sourceIds); + catalogService.getMostRecentActorCatalogFetchEventForSources(sourceIds); final List connectionItems = Lists.newArrayList(); @@ -246,7 +250,7 @@ private WebBackendConnectionRead buildWebBackendConnectionRead(final ConnectionR }); final Optional mostRecentFetchEvent = - configRepositoryDoNotUse.getMostRecentActorCatalogFetchEventForSource(connectionRead.getSourceId()); + catalogService.getMostRecentActorCatalogFetchEventForSource(connectionRead.getSourceId()); final SchemaChange schemaChange = getSchemaChange(connectionRead, currentSourceCatalogId, mostRecentFetchEvent); @@ -461,7 +465,7 @@ private AirbyteCatalog updateSchemaWithOriginalDiscoveredCatalog(final AirbyteCa } private Optional getRefreshedSchema(final UUID sourceId, final UUID connectionId) - throws JsonValidationException, ConfigNotFoundException, IOException { + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceDiscoverSchemaRequestBody discoverSchemaReadReq = new SourceDiscoverSchemaRequestBody() .sourceId(sourceId) .disableCache(true) @@ -618,7 +622,7 @@ public WebBackendConnectionRead webBackendUpdateConnection(final WebBackendConne // Get the most recent actor catalog fetched for this connection's source and the newly updated sync // catalog final Optional mostRecentActorCatalog = - configRepositoryDoNotUse.getMostRecentActorCatalogForSource(originalConnectionRead.getSourceId()); + catalogService.getMostRecentActorCatalogForSource(originalConnectionRead.getSourceId()); final AirbyteCatalog newAirbyteCatalog = webBackendConnectionPatch.getSyncCatalog(); // Get the diff between these two catalogs to check for breaking changes if (mostRecentActorCatalog.isPresent()) { diff --git 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java index c38f68774ac..13be7ba3374 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/ConnectionsHandlerTest.java @@ -153,6 +153,7 @@ import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.ConnectionTimelineEventService; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.SecretPersistenceConfigService; @@ -279,6 +280,7 @@ class ConnectionsHandlerTest { private ConnectionTimelineEventService connectionTimelineEventService; private ConnectionTimelineEventHelper connectionTimelineEventHelper; private StatePersistence statePersistence; + private CatalogService catalogService; @SuppressWarnings("unchecked") @BeforeEach @@ -366,6 +368,7 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio jobPersistence = mock(JobPersistence.class); streamRefreshesHandler = mock(StreamRefreshesHandler.class); configRepository = mock(ConfigRepository.class); + catalogService = mock(CatalogService.class); uuidGenerator = mock(Supplier.class); workspaceHelper = mock(WorkspaceHelper.class); trackingClient = mock(TrackingClient.class); @@ -405,7 +408,9 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio featureFlagClient, actorDefinitionHandlerHelper, actorDefinitionVersionUpdater); - sourceHandler = new SourceHandler(configRepository, + sourceHandler = new SourceHandler( + configRepository, + catalogService, secretsRepositoryReader, jsonSchemaValidator, 
connectionsHandler, @@ -413,7 +418,11 @@ void setUp() throws IOException, JsonValidationException, ConfigNotFoundExceptio secretsProcessor, configurationUpdate, oAuthConfigSupplier, - actorDefinitionVersionHelper, featureFlagClient, sourceService, workspaceService, secretPersistenceConfigService, + actorDefinitionVersionHelper, + featureFlagClient, + sourceService, + workspaceService, + secretPersistenceConfigService, actorDefinitionHandlerHelper, actorDefinitionVersionUpdater); @@ -440,6 +449,7 @@ void setUp() throws JsonValidationException, ConfigNotFoundException, IOExceptio streamRefreshesHandler, jobPersistence, configRepository, + catalogService, uuidGenerator, workspaceHelper, trackingClient, @@ -1861,6 +1871,7 @@ void setUp() { streamRefreshesHandler, jobPersistence, configRepository, + catalogService, uuidGenerator, workspaceHelper, trackingClient, @@ -2095,6 +2106,7 @@ void setUp() { streamRefreshesHandler, jobPersistence, configRepository, + catalogService, uuidGenerator, workspaceHelper, trackingClient, @@ -2720,7 +2732,7 @@ class ApplySchemaChanges { .withId(UUID.randomUUID()); @BeforeEach - void setup() throws IOException, JsonValidationException, ConfigNotFoundException { + void setup() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { airbyteCatalog.getStreams().get(0).withSupportedSyncModes(List.of(io.airbyte.protocol.models.SyncMode.FULL_REFRESH)); standardSync = new StandardSync() .withConnectionId(CONNECTION_ID) @@ -2730,7 +2742,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio .withManual(true) .withNonBreakingChangesPreference(StandardSync.NonBreakingChangesPreference.PROPAGATE_FULLY); - when(configRepository.getActorCatalogById(SOURCE_CATALOG_ID)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(SOURCE_CATALOG_ID)).thenReturn(actorCatalog); 
when(configRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); when(configRepository.getSourceConnection(SOURCE_ID)).thenReturn(source); when(configRepository.getStandardWorkspaceNoSecrets(WORKSPACE_ID, false)).thenReturn(WORKSPACE); @@ -2746,6 +2758,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio streamRefreshesHandler, jobPersistence, configRepository, + catalogService, uuidGenerator, workspaceHelper, trackingClient, @@ -2768,7 +2781,8 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio } @Test - void testAutoPropagateSchemaChange() throws IOException, ConfigNotFoundException, JsonValidationException { + void testAutoPropagateSchemaChange() + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Somehow standardSync is being mutated in the test (the catalog is changed) and verifying that the // notification function is called correctly requires the original object. final StandardSync originalSync = Jsons.clone(standardSync); @@ -2816,7 +2830,8 @@ void testAutoPropagateSchemaChange() throws IOException, ConfigNotFoundException } @Test - void testAutoPropagateColumnsOnly() throws JsonValidationException, ConfigNotFoundException, IOException { + void testAutoPropagateColumnsOnly() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { // See test above for why this part is necessary. 
final StandardSync originalSync = Jsons.clone(standardSync); final Field newField = Field.of(A_DIFFERENT_COLUMN, JsonSchemaType.STRING); @@ -2849,7 +2864,8 @@ void testAutoPropagateColumnsOnly() throws JsonValidationException, ConfigNotFou } @Test - void diffCatalogGeneratesADiffAndUpdatesTheConnection() throws JsonValidationException, ConfigNotFoundException, IOException { + void diffCatalogGeneratesADiffAndUpdatesTheConnection() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final Field newField = Field.of(A_DIFFERENT_COLUMN, JsonSchemaType.STRING); final var catalogWithDiff = io.airbyte.protocol.models.CatalogHelpers.createAirbyteCatalog(SHOES, Field.of(SKU, JsonSchemaType.STRING), newField); @@ -2857,7 +2873,7 @@ void diffCatalogGeneratesADiffAndUpdatesTheConnection() throws JsonValidationExc .withCatalog(Jsons.jsonNode(catalogWithDiff)) .withCatalogHash("") .withId(UUID.randomUUID()); - when(configRepository.getActorCatalogById(DISCOVERED_CATALOG_ID)).thenReturn(discoveredCatalog); + when(catalogService.getActorCatalogById(DISCOVERED_CATALOG_ID)).thenReturn(discoveredCatalog); final CatalogDiff expectedDiff = new CatalogDiff().addTransformsItem(new StreamTransform() @@ -2881,7 +2897,8 @@ void diffCatalogGeneratesADiffAndUpdatesTheConnection() throws JsonValidationExc } @Test - void diffCatalogADisablesForBreakingChange() throws JsonValidationException, ConfigNotFoundException, IOException { + void diffCatalogADisablesForBreakingChange() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { try (final MockedStatic helper = Mockito.mockStatic(AutoPropagateSchemaChangeHelper.class)) { helper.when(() -> AutoPropagateSchemaChangeHelper.containsBreakingChange(any())).thenReturn(true); @@ -2896,7 +2913,8 @@ void diffCatalogADisablesForBreakingChange() throws JsonValidationException, Con } @Test - void 
diffCatalogDisablesForNonBreakingChangeIfConfiguredSo() throws IOException, JsonValidationException, ConfigNotFoundException { + void diffCatalogDisablesForNonBreakingChangeIfConfiguredSo() + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { // configure the sync to be disabled on non-breaking change standardSync = standardSync.withNonBreakingChangesPreference(StandardSync.NonBreakingChangesPreference.DISABLE); when(configRepository.getStandardSync(CONNECTION_ID)).thenReturn(standardSync); @@ -2908,7 +2926,7 @@ void diffCatalogDisablesForNonBreakingChangeIfConfiguredSo() throws IOException, .withCatalog(Jsons.jsonNode(catalogWithDiff)) .withCatalogHash("") .withId(UUID.randomUUID()); - when(configRepository.getActorCatalogById(DISCOVERED_CATALOG_ID)).thenReturn(discoveredCatalog); + when(catalogService.getActorCatalogById(DISCOVERED_CATALOG_ID)).thenReturn(discoveredCatalog); final var result = connectionsHandler.diffCatalogAndConditionallyDisable(CONNECTION_ID, DISCOVERED_CATALOG_ID); @@ -2921,7 +2939,8 @@ void diffCatalogDisablesForNonBreakingChangeIfConfiguredSo() throws IOException, } @Test - void postprocessDiscoveredComposesDiffingAndSchemaPropagation() throws JsonValidationException, ConfigNotFoundException, IOException { + void postprocessDiscoveredComposesDiffingAndSchemaPropagation() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final var catalog = CatalogConverter.toApi(Jsons.clone(airbyteCatalog), SOURCE_VERSION); final var diffResult = new SourceDiscoverSchemaRead().catalog(catalog); final var transform = new StreamTransform().transformType(StreamTransform.TransformTypeEnum.ADD_STREAM) @@ -2949,6 +2968,7 @@ void setUp() { streamRefreshesHandler, jobPersistence, configRepository, + catalogService, uuidGenerator, workspaceHelper, trackingClient, diff --git 
a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java index d35eef9dff7..cb00805bb50 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SchedulerHandlerTest.java @@ -112,6 +112,7 @@ import io.airbyte.config.persistence.StreamResetPersistence; import io.airbyte.config.persistence.domain.StreamRefresh; import io.airbyte.config.secrets.SecretsRepositoryWriter; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.WorkspaceService; import io.airbyte.data.services.shared.ConnectionAutoUpdatedReason; @@ -263,6 +264,7 @@ class SchedulerHandlerTest { private NotificationHelper notificationHelper; private ConnectionTimelineEventHelper connectionTimelineEventHelper; private LogClientManager logClientManager; + private CatalogService catalogService; @BeforeEach void setup() throws JsonValidationException, ConfigNotFoundException, IOException { @@ -282,6 +284,7 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio synchronousSchedulerClient = mock(SynchronousSchedulerClient.class); configRepository = mock(ConfigRepository.class); + catalogService = mock(CatalogService.class); when(configRepository.getStandardSync(any())).thenReturn(new StandardSync().withStatus(StandardSync.Status.ACTIVE)); when(configRepository.getStandardDestinationDefinition(any())).thenReturn(SOME_DESTINATION_DEFINITION); when(configRepository.getDestinationDefinitionFromConnection(any())).thenReturn(SOME_DESTINATION_DEFINITION); @@ -318,6 +321,7 @@ void setup() throws JsonValidationException, ConfigNotFoundException, IOExceptio schedulerHandler = new SchedulerHandler( configRepository, + catalogService, 
secretsRepositoryWriter, synchronousSchedulerClient, configurationUpdate, @@ -760,7 +764,8 @@ void testCheckConnectionReadFormat(final Optional standardCheckConnectio @ParameterizedTest @ValueSource(booleans = {true, false}) - void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) throws IOException, JsonValidationException, ConfigNotFoundException { + void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); @@ -773,7 +778,7 @@ void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) throws IOExc .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(UUID.randomUUID()); - when(configRepository.getActorCatalogById(any())).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(any())).thenReturn(actorCatalog); when(discoverResponse.getMetadata()).thenReturn(metadata); when(metadata.isSucceeded()).thenReturn(true); @@ -794,7 +799,7 @@ void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) throws IOExc when(actorDefinitionVersionHelper.getSourceVersion(sourceDefinition, source.getWorkspaceId(), source.getSourceId())) .thenReturn(sourceVersion); when(configRepository.getSourceConnection(source.getSourceId())).thenReturn(source); - when(configRepository.getActorCatalog(any(), any(), any())).thenReturn(Optional.empty()); + when(catalogService.getActorCatalog(any(), any(), any())).thenReturn(Optional.empty()); when(synchronousSchedulerClient.createDiscoverSchemaJob(source, sourceVersion, false, RESOURCE_REQUIREMENT, WorkloadPriority.HIGH)) 
.thenReturn(discoverResponse); @@ -805,7 +810,7 @@ void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) throws IOExc assertNotNull(actual.getJobInfo()); assertTrue(actual.getJobInfo().getSucceeded()); verify(configRepository).getSourceConnection(source.getSourceId()); - verify(configRepository).getActorCatalog(eq(request.getSourceId()), eq(SOURCE_DOCKER_TAG), any()); + verify(catalogService).getActorCatalog(eq(request.getSourceId()), eq(SOURCE_DOCKER_TAG), any()); verify(actorDefinitionVersionHelper).getSourceVersion(sourceDefinition, source.getWorkspaceId(), source.getSourceId()); verify(synchronousSchedulerClient).createDiscoverSchemaJob(source, sourceVersion, false, RESOURCE_REQUIREMENT, WorkloadPriority.HIGH); } @@ -813,7 +818,7 @@ void testDiscoverSchemaForSourceFromSourceId(final boolean enabled) throws IOExc @ParameterizedTest @ValueSource(booleans = {true, false}) void testDiscoverSchemaForSourceFromSourceIdCachedCatalog(final boolean enabled) - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); @@ -837,7 +842,7 @@ void testDiscoverSchemaForSourceFromSourceIdCachedCatalog(final boolean enabled) .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(thisCatalogId); - when(configRepository.getActorCatalog(any(), any(), any())).thenReturn(Optional.of(actorCatalog)); + when(catalogService.getActorCatalog(any(), any(), any())).thenReturn(Optional.of(actorCatalog)); final SourceDiscoverSchemaRead actual = schedulerHandler.discoverSchemaForSourceFromSourceId(request); @@ -846,8 +851,8 
@@ void testDiscoverSchemaForSourceFromSourceIdCachedCatalog(final boolean enabled) assertEquals(actual.getCatalogId(), discoverResponse.getOutput()); assertTrue(actual.getJobInfo().getSucceeded()); verify(configRepository).getSourceConnection(source.getSourceId()); - verify(configRepository).getActorCatalog(eq(request.getSourceId()), any(), any()); - verify(configRepository, never()).writeActorCatalogFetchEvent(any(), any(), any(), any()); + verify(catalogService).getActorCatalog(eq(request.getSourceId()), any(), any()); + verify(catalogService, never()).writeActorCatalogFetchEvent(any(), any(), any(), any()); verify(actorDefinitionVersionHelper).getSourceVersion(sourceDefinition, source.getWorkspaceId(), source.getSourceId()); verify(synchronousSchedulerClient, never()).createDiscoverSchemaJob(any(), any(), anyBoolean(), any(), any()); } @@ -855,7 +860,7 @@ void testDiscoverSchemaForSourceFromSourceIdCachedCatalog(final boolean enabled) @ParameterizedTest @ValueSource(booleans = {true, false}) void testDiscoverSchemaForSourceFromSourceIdDisableCache(final boolean enabled) - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()).disableCache(true); @@ -883,7 +888,7 @@ void testDiscoverSchemaForSourceFromSourceIdDisableCache(final boolean enabled) .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); 
when(synchronousSchedulerClient.createDiscoverSchemaJob(source, sourceVersion, false, null, WorkloadPriority.HIGH)) .thenReturn(discoverResponse); @@ -899,7 +904,8 @@ void testDiscoverSchemaForSourceFromSourceIdDisableCache(final boolean enabled) @ParameterizedTest @ValueSource(booleans = {true, false}) - void testDiscoverSchemaForSourceFromSourceIdFailed(final boolean enabled) throws IOException, JsonValidationException, ConfigNotFoundException { + void testDiscoverSchemaForSourceFromSourceIdFailed(final boolean enabled) + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()); @@ -934,7 +940,7 @@ void testDiscoverSchemaForSourceFromSourceIdFailed(final boolean enabled) throws @ParameterizedTest @ValueSource(booleans = {true, false}) void whenDiscoverPostprocessInTemporalEnabledDiffAndDisablingIsNotPerformed(final boolean enabled) - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { when(featureFlagClient.boolVariation(eq(DiscoverPostprocessInTemporal.INSTANCE), any())).thenReturn(enabled); final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final UUID connectionId1 = UUID.randomUUID(); @@ -989,7 +995,7 @@ void whenDiscoverPostprocessInTemporalEnabledDiffAndDisablingIsNotPerformed(fina .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + 
when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); when(connectionsHandler.updateSchemaChangesAndAutoDisableConnectionIfNeeded(any(), anyBoolean(), any())).thenReturn( new ConnectionRead().status(ConnectionStatus.INACTIVE)); @@ -1009,7 +1015,7 @@ void whenDiscoverPostprocessInTemporalEnabledDiffAndDisablingIsNotPerformed(fina // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final UUID connectionId = UUID.randomUUID(); final UUID discoveredCatalogId = UUID.randomUUID(); @@ -1055,7 +1061,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); @@ -1070,7 +1076,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreaking() // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlag() - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final UUID connectionId = UUID.randomUUID(); final UUID discoveredCatalogId = UUID.randomUUID(); @@ 
-1118,7 +1124,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); @@ -1134,7 +1140,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() - throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException { + throws IOException, JsonValidationException, ConfigNotFoundException, InterruptedException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final UUID connectionId = UUID.randomUUID(); final UUID discoveredCatalogId = UUID.randomUUID(); @@ -1181,7 +1187,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); final AirbyteCatalog persistenceCatalog = Jsons.object(actorCatalog.getCatalog(), io.airbyte.protocol.models.AirbyteCatalog.class); @@ -1199,7 +1205,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdBreakingFeatureFlagOn() // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionPreferenceFeatureFlagNoDiff() - throws IOException, JsonValidationException, 
ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final UUID connectionId = UUID.randomUUID(); final UUID discoveredCatalogId = UUID.randomUUID(); @@ -1241,7 +1247,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); when(connectionsHandler.updateSchemaChangesAndAutoDisableConnectionIfNeeded(any(), anyBoolean(), any())).thenReturn( new ConnectionRead().status(ConnectionStatus.INACTIVE)); @@ -1260,7 +1266,7 @@ void testDiscoverSchemaFromSourceIdWithConnectionIdNonBreakingDisableConnectionP // TODO: to be removed once we swap to new discover flow @Test void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() - throws IOException, JsonValidationException, ConfigNotFoundException { + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final UUID connectionId = UUID.randomUUID(); final UUID connectionId2 = UUID.randomUUID(); @@ -1332,7 +1338,7 @@ void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(discoveredCatalogId); - when(configRepository.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(discoveredCatalogId)).thenReturn(actorCatalog); when(connectionsHandler.updateSchemaChangesAndAutoDisableConnectionIfNeeded(any(), anyBoolean(), any())).thenReturn( new 
ConnectionRead().status(ConnectionStatus.ACTIVE)); @@ -1349,7 +1355,8 @@ void testDiscoverSchemaForSourceMultipleConnectionsFeatureFlagOn() } @Test - void testDiscoverSchemaFromSourceIdWithConnectionUpdateNonSuccessResponse() throws IOException, JsonValidationException, ConfigNotFoundException { + void testDiscoverSchemaFromSourceIdWithConnectionUpdateNonSuccessResponse() + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final SourceDiscoverSchemaRequestBody request = new SourceDiscoverSchemaRequestBody().sourceId(source.getSourceId()) .connectionId(UUID.randomUUID()).notifySchemaChange(true); @@ -1385,7 +1392,8 @@ void testDiscoverSchemaFromSourceIdWithConnectionUpdateNonSuccessResponse() thro } @Test - void testDiscoverSchemaForSourceFromSourceCreate() throws JsonValidationException, IOException, ConfigNotFoundException { + void testDiscoverSchemaForSourceFromSourceCreate() + throws JsonValidationException, IOException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = new SourceConnection() .withSourceDefinitionId(SOURCE.getSourceDefinitionId()) .withConfiguration(SOURCE.getConfiguration()) @@ -1406,7 +1414,7 @@ void testDiscoverSchemaForSourceFromSourceCreate() throws JsonValidationExceptio .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(UUID.randomUUID()); - when(configRepository.getActorCatalogById(any())).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(any())).thenReturn(actorCatalog); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() .withSourceDefinitionId(source.getSourceDefinitionId()); @@ -1436,7 +1444,8 @@ void testDiscoverSchemaForSourceFromSourceCreate() throws JsonValidationExceptio } @Test - void testDiscoverSchemaForSourceFromSourceCreateFailed() throws 
JsonValidationException, IOException, ConfigNotFoundException { + void testDiscoverSchemaForSourceFromSourceCreateFailed() + throws JsonValidationException, IOException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = new SourceConnection() .withSourceDefinitionId(SOURCE.getSourceDefinitionId()) .withConfiguration(SOURCE.getConfiguration()); @@ -1651,7 +1660,8 @@ void testCancelJob() throws IOException { } @Test - void testAutoPropagateSchemaChangeAddStream() throws IOException, ConfigNotFoundException, JsonValidationException { + void testAutoPropagateSchemaChangeAddStream() + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Verify that if auto propagation is fully enabled, a new stream can be added. final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1693,7 +1703,8 @@ void testAutoPropagateSchemaChangeAddStream() throws IOException, ConfigNotFound } @Test - void testAutoPropagateSchemaChangeUpdateStream() throws IOException, ConfigNotFoundException, JsonValidationException { + void testAutoPropagateSchemaChangeUpdateStream() + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Verify that if auto propagation is fully enabled, an existing stream can be modified. 
final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1732,7 +1743,8 @@ void testAutoPropagateSchemaChangeUpdateStream() throws IOException, ConfigNotFo } @Test - void testAutoPropagateSchemaChangeRemoveStream() throws IOException, ConfigNotFoundException, JsonValidationException { + void testAutoPropagateSchemaChangeRemoveStream() + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Verify that if auto propagation is fully enabled, an existing stream can be removed. final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1771,7 +1783,8 @@ void testAutoPropagateSchemaChangeRemoveStream() throws IOException, ConfigNotFo } @Test - void testAutoPropagateSchemaChangeColumnsOnly() throws IOException, ConfigNotFoundException, JsonValidationException { + void testAutoPropagateSchemaChangeColumnsOnly() + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Verify that if auto propagation is set to PROPAGATE_COLUMNS, then column changes are applied but // a new stream is ignored. 
final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); @@ -1812,7 +1825,8 @@ void testAutoPropagateSchemaChangeColumnsOnly() throws IOException, ConfigNotFou } @Test - void testAutoPropagateSchemaChangeWithIgnoreMode() throws IOException, ConfigNotFoundException, JsonValidationException { + void testAutoPropagateSchemaChangeWithIgnoreMode() + throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() @@ -1847,7 +1861,8 @@ void testAutoPropagateSchemaChangeWithIgnoreMode() throws IOException, ConfigNot } @Test - void testAutoPropagateSchemaChangeEarlyExits() throws JsonValidationException, ConfigNotFoundException, IOException { + void testAutoPropagateSchemaChangeEarlyExits() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { SourceAutoPropagateChange request = getMockedSourceAutoPropagateChange().sourceId(null); schedulerHandler.applySchemaChangeForSource(request); verifyNoInteractions(connectionsHandler); @@ -1867,7 +1882,8 @@ void testAutoPropagateSchemaChangeEarlyExits() throws JsonValidationException, C } @Test - void testSchemaPropagatedEmptyDiff() throws IOException, JsonValidationException, ConfigNotFoundException { + void testSchemaPropagatedEmptyDiff() + throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID sourceId = UUID.randomUUID(); final UUID workspaceId = UUID.randomUUID(); @@ -1919,7 +1935,8 @@ void testSchemaPropagatedEmptyDiff() throws IOException, JsonValidationException } @Test - void testEmptyDiffIsAlwaysPropagated() throws JsonValidationException, ConfigNotFoundException, IOException { + void testEmptyDiffIsAlwaysPropagated() + throws 
JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { // Verify that if auto propagation is fully enabled, a new stream can be added. final SourceConnection source = SourceHelpers.generateSource(UUID.randomUUID()); final StandardSourceDefinition sourceDefinition = new StandardSourceDefinition() @@ -1977,7 +1994,7 @@ private void mockSourceForDiscoverJob(final SourceConnection source, final Stand } private UUID mockSuccessfulDiscoverJob(final SourceConnection source, final ActorDefinitionVersion sourceVersion) - throws ConfigNotFoundException, IOException { + throws ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newSourceCatalogId = UUID.randomUUID(); final SynchronousResponse discoverResponse = (SynchronousResponse) jobResponse; final SynchronousJobMetadata metadata = mock(SynchronousJobMetadata.class); @@ -1987,7 +2004,7 @@ private UUID mockSuccessfulDiscoverJob(final SourceConnection source, final Acto .withCatalog(Jsons.jsonNode(airbyteCatalog)) .withCatalogHash("") .withId(newSourceCatalogId); - when(configRepository.getActorCatalogById(any())).thenReturn(actorCatalog); + when(catalogService.getActorCatalogById(any())).thenReturn(actorCatalog); when(discoverResponse.getMetadata()).thenReturn(metadata); when(metadata.isSucceeded()).thenReturn(true); when(synchronousSchedulerClient.createDiscoverSchemaJob(source, sourceVersion, false, null, WorkloadPriority.HIGH)) diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java index abb8f5111a0..9368767b5d1 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/SourceHandlerTest.java @@ -55,6 +55,7 @@ import 
io.airbyte.config.secrets.SecretCoordinate; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; @@ -110,11 +111,13 @@ class SourceHandlerTest { private WorkspaceService workspaceService; private SecretPersistenceConfigService secretPersistenceConfigService; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; + private CatalogService catalogService; @SuppressWarnings("unchecked") @BeforeEach void setUp() throws IOException { configRepository = mock(ConfigRepository.class); + catalogService = mock(CatalogService.class); secretsRepositoryReader = mock(SecretsRepositoryReader.class); validator = mock(JsonSchemaValidator.class); connectionsHandler = mock(ConnectionsHandler.class); @@ -154,6 +157,7 @@ void setUp() throws IOException { sourceConnection = SourceHelpers.generateSource(standardSourceDefinition.getSourceDefinitionId()); sourceHandler = new SourceHandler(configRepository, + catalogService, secretsRepositoryReader, validator, connectionsHandler, @@ -519,10 +523,10 @@ void testWriteDiscoverCatalogResult() throws JsonValidationException, IOExceptio .connectorVersion(connectorVersion) .configurationHash(hashValue); - when(configRepository.writeActorCatalogFetchEvent(expectedCatalog, actorId, connectorVersion, hashValue)).thenReturn(catalogId); + when(catalogService.writeActorCatalogFetchEvent(expectedCatalog, actorId, connectorVersion, hashValue)).thenReturn(catalogId); final DiscoverCatalogResult result = sourceHandler.writeDiscoverCatalogResult(request); - verify(configRepository).writeActorCatalogFetchEvent(expectedCatalog, actorId, connectorVersion, hashValue); + verify(catalogService).writeActorCatalogFetchEvent(expectedCatalog, actorId, connectorVersion, hashValue); 
assert (result.getCatalogId()).equals(catalogId); } diff --git a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java index 2c4f899c87b..ae25db7c5d3 100644 --- a/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java +++ b/airbyte-commons-server/src/test/java/io/airbyte/commons/server/handlers/WebBackendConnectionsHandlerTest.java @@ -108,6 +108,7 @@ import io.airbyte.config.secrets.JsonSecretsProcessor; import io.airbyte.config.secrets.SecretsRepositoryReader; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.SecretPersistenceConfigService; @@ -162,6 +163,7 @@ class WebBackendConnectionsHandlerTest { private WebBackendConnectionRead expectedNoDiscoveryWithNewSchema; private EventRunner eventRunner; private ConfigRepository configRepository; + private CatalogService catalogService; private ConnectionService connectionService; private ActorDefinitionVersionHelper actorDefinitionVersionHelper; private ActorDefinitionHandlerHelper actorDefinitionHandlerHelper; @@ -178,13 +180,14 @@ class WebBackendConnectionsHandlerTest { private static final String ICON_URL = "https://connectors.airbyte.com/files/metadata/airbyte/destination-test/latest/icon.svg"; @BeforeEach - void setup() throws IOException, JsonValidationException, ConfigNotFoundException { + void setup() throws IOException, JsonValidationException, ConfigNotFoundException, io.airbyte.data.exceptions.ConfigNotFoundException { actorDefinitionVersionHandler = mock(ActorDefinitionVersionHandler.class); connectionsHandler = mock(ConnectionsHandler.class); stateHandler = mock(StateHandler.class); 
operationsHandler = mock(OperationsHandler.class); final JobHistoryHandler jobHistoryHandler = mock(JobHistoryHandler.class); configRepository = mock(ConfigRepository.class); + catalogService = mock(CatalogService.class); connectionService = mock(ConnectionService.class); schedulerHandler = mock(SchedulerHandler.class); eventRunner = mock(EventRunner.class); @@ -218,7 +221,9 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio actorDefinitionHandlerHelper, actorDefinitionVersionUpdater); - final SourceHandler sourceHandler = new SourceHandler(configRepository, + final SourceHandler sourceHandler = new SourceHandler( + configRepository, + catalogService, secretsRepositoryReader, validator, connectionsHandler, @@ -226,7 +231,11 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio secretsProcessor, configurationUpdate, oAuthConfigSupplier, - actorDefinitionVersionHelper, featureFlagClient, sourceService, workspaceService, secretPersistenceConfigService, + actorDefinitionVersionHelper, + featureFlagClient, + sourceService, + workspaceService, + secretPersistenceConfigService, actorDefinitionHandlerHelper, actorDefinitionVersionUpdater); @@ -241,6 +250,7 @@ void setup() throws IOException, JsonValidationException, ConfigNotFoundExceptio operationsHandler, eventRunner, configRepository, + catalogService, connectionService, actorDefinitionVersionHelper, fieldGenerator, @@ -518,9 +528,9 @@ WebBackendConnectionRead testWebBackendGetConnection(final boolean withCatalogRe void testWebBackendGetConnectionWithDiscoveryAndNewSchema() throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newCatalogId = UUID.randomUUID(); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) + when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new 
ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); + when(catalogService.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); final SourceDiscoverSchemaRead schemaRead = new SourceDiscoverSchemaRead().catalogDiff(expectedWithNewSchema.getCatalogDiff()).catalog(expectedWithNewSchema.getSyncCatalog()) .breakingChange(false).connectionStatus(ConnectionStatus.ACTIVE); @@ -536,9 +546,9 @@ void testWebBackendGetConnectionWithDiscoveryAndNewSchema() throws ConfigNotFoun void testWebBackendGetConnectionWithDiscoveryAndNewSchemaBreakingChange() throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newCatalogId = UUID.randomUUID(); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) + when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); + when(catalogService.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); final SourceDiscoverSchemaRead schemaRead = new SourceDiscoverSchemaRead().catalogDiff(expectedWithNewSchema.getCatalogDiff()).catalog(expectedWithNewSchema.getSyncCatalog()) .breakingChange(true).connectionStatus(ConnectionStatus.INACTIVE); @@ -555,9 +565,9 @@ void testWebBackendGetConnectionWithDiscoveryAndNewSchemaBreakingChange() throws void testWebBackendGetConnectionWithDiscoveryMissingCatalogUsedToMakeConfiguredCatalog() throws IOException, ConfigNotFoundException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { final UUID newCatalogId = UUID.randomUUID(); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) 
+ when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(newCatalogId))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); + when(catalogService.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); final SourceDiscoverSchemaRead schemaRead = new SourceDiscoverSchemaRead().catalogDiff(expectedWithNewSchema.getCatalogDiff()).catalog(expectedWithNewSchema.getSyncCatalog()) .breakingChange(false).connectionStatus(ConnectionStatus.ACTIVE); @@ -573,7 +583,7 @@ void testWebBackendGetConnectionWithDiscoveryMissingCatalogUsedToMakeConfiguredC void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionAddField() throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Mock this because the API uses it to determine whether there was a schema change. - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) + when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); // Original configured catalog has two fields, and only one of them is selected. @@ -626,7 +636,7 @@ void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionAddField() throws void testWebBackendGetConnectionWithDiscoveryAndFieldSelectionRemoveField() throws ConfigNotFoundException, IOException, JsonValidationException, io.airbyte.data.exceptions.ConfigNotFoundException { // Mock this because the API uses it to determine whether there was a schema change. 
- when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) + when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); // Original configured catalog has two fields, and both of them are selected. @@ -681,9 +691,9 @@ void testWebBackendGetConnectionNoRefreshCatalog() @Test void testWebBackendGetConnectionNoDiscoveryWithNewSchema() throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) + when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); + when(catalogService.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); final WebBackendConnectionRead result = testWebBackendGetConnection(false, connectionRead, operationReadList); assertEquals(expectedNoDiscoveryWithNewSchema, result); } @@ -692,9 +702,9 @@ void testWebBackendGetConnectionNoDiscoveryWithNewSchema() void testWebBackendGetConnectionNoDiscoveryWithNewSchemaBreaking() throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { when(connectionsHandler.getConnection(brokenConnectionRead.getConnectionId())).thenReturn(brokenConnectionRead); - when(configRepository.getMostRecentActorCatalogFetchEventForSource(any())) + when(catalogService.getMostRecentActorCatalogFetchEventForSource(any())) .thenReturn(Optional.of(new ActorCatalogFetchEvent().withActorCatalogId(UUID.randomUUID()))); - when(configRepository.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); + 
when(catalogService.getActorCatalogById(any())).thenReturn(new ActorCatalog().withId(UUID.randomUUID())); final WebBackendConnectionRead result = testWebBackendGetConnection(false, brokenConnectionRead, brokenOperationReadList); assertEquals(expectedWithNewSchemaBroken, result); } @@ -1015,7 +1025,7 @@ void testUpdateConnectionWithUpdatedSchemaPerStream(final Boolean useRefresh) when(eventRunner.resetConnection(any(), any())).thenReturn(successfulResult); when(eventRunner.startNewManualSync(any())).thenReturn(successfulResult); - when(configRepository.getMostRecentActorCatalogForSource(any())).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.emptyObject()))); + when(catalogService.getMostRecentActorCatalogForSource(any())).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.emptyObject()))); final WebBackendConnectionRead result = wbHandler.webBackendUpdateConnection(updateBody); @@ -1164,7 +1174,7 @@ void testUpdateConnectionFixingBreakingSchemaChange() final CatalogDiff catalogDiff = new CatalogDiff().transforms(List.of()); - when(configRepository.getMostRecentActorCatalogForSource(sourceId)).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.deserialize( + when(catalogService.getMostRecentActorCatalogForSource(sourceId)).thenReturn(Optional.of(new ActorCatalog().withCatalog(Jsons.deserialize( "{\"streams\": [{\"name\": \"cat_names\", " + "\"namespace\": \"public\", " + "\"json_schema\": {\"type\": \"object\", \"properties\": {\"id\": {\"type\": \"number\", \"airbyte_type\": \"integer\"}}}}]}")))); diff --git a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java index c22417547ed..15de160dbd7 100644 --- a/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java +++ 
b/airbyte-config/config-persistence/src/main/java/io/airbyte/config/persistence/ConfigRepository.java @@ -7,9 +7,6 @@ import com.google.common.annotations.VisibleForTesting; import datadog.trace.api.Trace; import io.airbyte.commons.version.Version; -import io.airbyte.config.ActorCatalog; -import io.airbyte.config.ActorCatalogFetchEvent; -import io.airbyte.config.ActorCatalogWithUpdatedAt; import io.airbyte.config.ActorDefinitionBreakingChange; import io.airbyte.config.ActorDefinitionConfigInjection; import io.airbyte.config.ActorDefinitionVersion; @@ -25,14 +22,12 @@ import io.airbyte.config.StreamDescriptor; import io.airbyte.config.WorkspaceServiceAccount; import io.airbyte.data.services.ActorDefinitionService; -import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.ConnectorBuilderService; import io.airbyte.data.services.DestinationService; import io.airbyte.data.services.OperationService; import io.airbyte.data.services.SourceService; import io.airbyte.data.services.WorkspaceService; -import io.airbyte.protocol.models.AirbyteCatalog; import io.airbyte.validation.json.JsonValidationException; import jakarta.annotation.Nonnull; import java.io.IOException; @@ -133,7 +128,6 @@ public record ResourcesByUserQueryPaginated( int rowOffset) {} private final ActorDefinitionService actorDefinitionService; - private final CatalogService catalogService; private final ConnectionService connectionService; private final ConnectorBuilderService connectorBuilderService; private final DestinationService destinationService; @@ -144,7 +138,6 @@ public record ResourcesByUserQueryPaginated( @SuppressWarnings("ParameterName") @VisibleForTesting public ConfigRepository(final ActorDefinitionService actorDefinitionService, - final CatalogService catalogService, final ConnectionService connectionService, final ConnectorBuilderService connectorBuilderService, final DestinationService destinationService, @@ -152,7 
+145,6 @@ public ConfigRepository(final ActorDefinitionService actorDefinitionService, final SourceService sourceService, final WorkspaceService workspaceService) { this.actorDefinitionService = actorDefinitionService; - this.catalogService = catalogService; this.connectionService = connectionService; this.connectorBuilderService = connectorBuilderService; this.destinationService = destinationService; @@ -1261,120 +1253,6 @@ public List getDestinationAndDefinitionsFromDestinatio .toList(); } - /** - * Get actor catalog. - * - * @param actorCatalogId actor catalog id - * @return actor catalog - * @throws ConfigNotFoundException if the config does not exist - * @throws IOException if there is an issue while interacting with db. - */ - @Deprecated - public ActorCatalog getActorCatalogById(final UUID actorCatalogId) - throws IOException, ConfigNotFoundException { - try { - return catalogService.getActorCatalogById(actorCatalogId); - } catch (final io.airbyte.data.exceptions.ConfigNotFoundException e) { - throw new ConfigNotFoundException(e.getType(), e.getConfigId()); - } - } - - /** - * Get most actor catalog for source. - * - * @param actorId actor id - * @param actorVersion actor definition version used to make this actor - * @param configHash config hash for actor - * @return actor catalog for config has and actor version - * @throws IOException - error while interacting with db - */ - @Deprecated - public Optional getActorCatalog(final UUID actorId, - final String actorVersion, - final String configHash) - throws IOException { - return catalogService.getActorCatalog(actorId, actorVersion, configHash); - } - - /** - * Get most recent actor catalog for source. 
- * - * @param sourceId source id - * @return current actor catalog with updated at - * @throws IOException - error while interacting with db - */ - @Deprecated - public Optional getMostRecentSourceActorCatalog(final UUID sourceId) throws IOException { - return catalogService.getMostRecentSourceActorCatalog(sourceId); - } - - /** - * Get most recent actor catalog for source. - * - * @param sourceId source id - * @return current actor catalog - * @throws IOException - error while interacting with db - */ - @Deprecated - public Optional getMostRecentActorCatalogForSource(final UUID sourceId) throws IOException { - return catalogService.getMostRecentActorCatalogForSource(sourceId); - } - - /** - * Get most recent actor catalog fetch event for source. - * - * @param sourceId source id - * @return last actor catalog fetch event - * @throws IOException - error while interacting with db - */ - @Deprecated - public Optional getMostRecentActorCatalogFetchEventForSource(final UUID sourceId) throws IOException { - return catalogService.getMostRecentActorCatalogFetchEventForSource(sourceId); - } - - /** - * Get most recent actor catalog fetch event for sources. - * - * @param sourceIds source ids - * @return map of source id to the last actor catalog fetch event - * @throws IOException - error while interacting with db - */ - @SuppressWarnings({"unused", "SqlNoDataSourceInspection"}) - @Deprecated - public Map getMostRecentActorCatalogFetchEventForSources(final List sourceIds) throws IOException { - return catalogService.getMostRecentActorCatalogFetchEventForSources(sourceIds); - } - - /** - * Stores source catalog information. - *

- * This function is called each time the schema of a source is fetched. This can occur because the - * source is set up for the first time, because the configuration or version of the connector - * changed or because the user explicitly requested a schema refresh. Schemas are stored separately - * and de-duplicated upon insertion. Once a schema has been successfully stored, a call to - * getActorCatalog(actorId, connectionVersion, configurationHash) will return the most recent schema - * stored for those parameters. - * - * @param catalog - catalog that was fetched. - * @param actorId - actor the catalog was fetched by - * @param connectorVersion - version of the connector when catalog was fetched - * @param configurationHash - hash of the config of the connector when catalog was fetched - * @return The identifier (UUID) of the fetch event inserted in the database - * @throws IOException - error while interacting with db - */ - @Deprecated - public UUID writeActorCatalogFetchEvent(final AirbyteCatalog catalog, - final UUID actorId, - final String connectorVersion, - final String configurationHash) - throws IOException { - return catalogService.writeActorCatalogFetchEvent( - catalog, - actorId, - connectorVersion, - configurationHash); - } - /** * Count connections in workspace. 
* diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java index b1151efed2c..837fce04721 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionBreakingChangePersistenceTest.java @@ -25,7 +25,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -131,7 +130,6 @@ void setup() throws SQLException, JsonValidationException, IOException { configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java index 73ae6900ba4..224e4598987 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionPersistenceTest.java @@ -33,7 +33,6 @@ import 
io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; @@ -88,7 +87,6 @@ void setup() throws SQLException, IOException { configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java index c7ac4a5361c..1e7ddfb898d 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ActorDefinitionVersionPersistenceTest.java @@ -33,7 +33,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -128,7 +127,6 @@ void beforeEach() throws Exception { configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new 
CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java index 5087f34de79..53652e99039 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigInjectionTest.java @@ -25,7 +25,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -70,7 +69,6 @@ void beforeEach() throws Exception { new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java index 33c98945889..45eeca87086 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java +++ 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConfigRepositoryE2EReadWriteTest.java @@ -43,6 +43,7 @@ import io.airbyte.config.secrets.SecretsRepositoryWriter; import io.airbyte.data.helpers.ActorDefinitionVersionUpdater; import io.airbyte.data.services.ActorDefinitionService; +import io.airbyte.data.services.CatalogService; import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.OAuthService; import io.airbyte.data.services.OrganizationService; @@ -100,6 +101,7 @@ class ConfigRepositoryE2EReadWriteTest extends BaseConfigDatabaseTest { private static final String CONFIG_HASH = "ConfigHash"; private ConfigRepository configRepository; + private CatalogService catalogService; private OAuthService oauthService; @BeforeEach @@ -121,7 +123,6 @@ void setup() throws IOException, JsonValidationException, SQLException { configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, @@ -144,6 +145,7 @@ void setup() throws IOException, JsonValidationException, SQLException { secretsRepositoryReader, secretsRepositoryWriter, secretPersistenceConfigService))); + catalogService = spy(new CatalogServiceJooqImpl(database)); OrganizationService organizationService = new OrganizationServiceJooqImpl(database); organizationService.writeOrganization(MockData.defaultOrganization()); oauthService = spy(new OAuthServiceJooqImpl(database, @@ -243,12 +245,12 @@ void testReadActorCatalog() throws IOException, JsonValidationException, SQLExce final AirbyteCatalog firstCatalog = CatalogHelpers.createAirbyteCatalog("product", Field.of("label", JsonSchemaType.STRING), Field.of("size", JsonSchemaType.NUMBER), Field.of("color", JsonSchemaType.STRING), Field.of("price", JsonSchemaType.NUMBER)); - configRepository.writeActorCatalogFetchEvent(firstCatalog, 
source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); + catalogService.writeActorCatalogFetchEvent(firstCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); final AirbyteCatalog secondCatalog = CatalogHelpers.createAirbyteCatalog("product", Field.of("size", JsonSchemaType.NUMBER), Field.of("label", JsonSchemaType.STRING), Field.of("color", JsonSchemaType.STRING), Field.of("price", JsonSchemaType.NUMBER)); - configRepository.writeActorCatalogFetchEvent(secondCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); + catalogService.writeActorCatalogFetchEvent(secondCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); final String expectedCatalog = "{" @@ -271,7 +273,7 @@ void testReadActorCatalog() throws IOException, JsonValidationException, SQLExce + "]" + "}"; - final Optional catalogResult = configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); + final Optional catalogResult = catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); assertTrue(catalogResult.isPresent()); assertEquals(expectedCatalog, Jsons.serialize(catalogResult.get().getCatalog())); } @@ -301,7 +303,7 @@ void testWriteCanonicalHashActorCatalog() throws IOException, JsonValidationExce final AirbyteCatalog firstCatalog = CatalogHelpers.createAirbyteCatalog("product", Field.of("label", JsonSchemaType.STRING), Field.of("size", JsonSchemaType.NUMBER), Field.of("color", JsonSchemaType.STRING), Field.of("price", JsonSchemaType.NUMBER)); - configRepository.writeActorCatalogFetchEvent(firstCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); + catalogService.writeActorCatalogFetchEvent(firstCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); final String expectedCatalog = "{" @@ -324,7 +326,7 @@ void testWriteCanonicalHashActorCatalog() throws IOException, JsonValidationExce + "]" + "}"; - final Optional catalogResult = configRepository.getActorCatalog(source.getSourceId(), 
DOCKER_IMAGE_TAG, CONFIG_HASH); + final Optional catalogResult = catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); assertTrue(catalogResult.isPresent()); assertEquals(catalogResult.get().getCatalogHash(), canonicalConfigHash); assertEquals(expectedCatalog, Jsons.canonicalJsonSerialize(catalogResult.get().getCatalog())); @@ -354,25 +356,25 @@ void testSimpleInsertActorCatalog() throws IOException, SQLException { final AirbyteCatalog actorCatalog = CatalogHelpers.createAirbyteCatalog("clothes", Field.of("name", JsonSchemaType.STRING)); final AirbyteCatalog expectedActorCatalog = CatalogHelpers.createAirbyteCatalog("clothes", Field.of("name", JsonSchemaType.STRING)); - configRepository.writeActorCatalogFetchEvent( + catalogService.writeActorCatalogFetchEvent( actorCatalog, source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); final Optional catalog = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); + catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, CONFIG_HASH); assertTrue(catalog.isPresent()); assertEquals(expectedActorCatalog, Jsons.object(catalog.get().getCatalog(), AirbyteCatalog.class)); - assertFalse(configRepository.getActorCatalog(source.getSourceId(), "1.3.0", CONFIG_HASH).isPresent()); - assertFalse(configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash).isPresent()); + assertFalse(catalogService.getActorCatalog(source.getSourceId(), "1.3.0", CONFIG_HASH).isPresent()); + assertFalse(catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash).isPresent()); - configRepository.writeActorCatalogFetchEvent(actorCatalog, source.getSourceId(), "1.3.0", CONFIG_HASH); + catalogService.writeActorCatalogFetchEvent(actorCatalog, source.getSourceId(), "1.3.0", CONFIG_HASH); final Optional catalogNewConnectorVersion = - configRepository.getActorCatalog(source.getSourceId(), "1.3.0", CONFIG_HASH); + 
catalogService.getActorCatalog(source.getSourceId(), "1.3.0", CONFIG_HASH); assertTrue(catalogNewConnectorVersion.isPresent()); assertEquals(expectedActorCatalog, Jsons.object(catalogNewConnectorVersion.get().getCatalog(), AirbyteCatalog.class)); - configRepository.writeActorCatalogFetchEvent(actorCatalog, source.getSourceId(), "1.2.0", otherConfigHash); + catalogService.writeActorCatalogFetchEvent(actorCatalog, source.getSourceId(), "1.2.0", otherConfigHash); final Optional catalogNewConfig = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); + catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); assertTrue(catalogNewConfig.isPresent()); assertEquals(expectedActorCatalog, Jsons.object(catalogNewConfig.get().getCatalog(), AirbyteCatalog.class)); @@ -380,15 +382,15 @@ void testSimpleInsertActorCatalog() throws IOException, SQLException { assertEquals(1, catalogDbEntry); // Writing the previous catalog with v1 data types - configRepository.writeActorCatalogFetchEvent(expectedActorCatalog, source.getSourceId(), "1.2.0", otherConfigHash); + catalogService.writeActorCatalogFetchEvent(expectedActorCatalog, source.getSourceId(), "1.2.0", otherConfigHash); final Optional catalogV1NewConfig = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); + catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); assertTrue(catalogV1NewConfig.isPresent()); assertEquals(expectedActorCatalog, Jsons.object(catalogNewConfig.get().getCatalog(), AirbyteCatalog.class)); - configRepository.writeActorCatalogFetchEvent(expectedActorCatalog, source.getSourceId(), "1.4.0", otherConfigHash); + catalogService.writeActorCatalogFetchEvent(expectedActorCatalog, source.getSourceId(), "1.4.0", otherConfigHash); final Optional catalogV1again = - configRepository.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); + 
catalogService.getActorCatalog(source.getSourceId(), DOCKER_IMAGE_TAG, otherConfigHash); assertTrue(catalogV1again.isPresent()); assertEquals(expectedActorCatalog, Jsons.object(catalogNewConfig.get().getCatalog(), AirbyteCatalog.class)); @@ -828,7 +830,7 @@ void testGetMostRecentActorCatalogFetchEventForSource() throws SQLException, IOE }); final Optional result = - configRepository.getMostRecentActorCatalogFetchEventForSource(fetchEvent1.getActorId()); + catalogService.getMostRecentActorCatalogFetchEventForSource(fetchEvent1.getActorId()); assertEquals(fetchEvent2.getActorCatalogId(), result.get().getActorCatalogId()); } @@ -850,12 +852,12 @@ void testGetMostRecentActorCatalogFetchEventForSources() throws SQLException, IO }); final Map result = - configRepository.getMostRecentActorCatalogFetchEventForSources(List.of(MockData.SOURCE_ID_1, + catalogService.getMostRecentActorCatalogFetchEventForSources(List.of(MockData.SOURCE_ID_1, MockData.SOURCE_ID_2)); assertEquals(MockData.ACTOR_CATALOG_ID_1, result.get(MockData.SOURCE_ID_1).getActorCatalogId()); assertEquals(MockData.ACTOR_CATALOG_ID_3, result.get(MockData.SOURCE_ID_2).getActorCatalogId()); - assertEquals(0, configRepository.getMostRecentActorCatalogFetchEventForSources(Collections.emptyList()).size()); + assertEquals(0, catalogService.getMostRecentActorCatalogFetchEventForSources(Collections.emptyList()).size()); } @Test @@ -884,7 +886,7 @@ void testGetMostRecentActorCatalogFetchEventWithDuplicates() throws SQLException }); final Map result = - configRepository.getMostRecentActorCatalogFetchEventForSources(List.of(MockData.SOURCE_ID_1, + catalogService.getMostRecentActorCatalogFetchEventForSources(List.of(MockData.SOURCE_ID_1, MockData.SOURCE_ID_2)); assertEquals(MockData.ACTOR_CATALOG_ID_1, result.get(MockData.SOURCE_ID_1).getActorCatalogId()); diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java index 203a5c759eb..126c5ab160b 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/ConnectorMetadataPersistenceTest.java @@ -40,7 +40,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -101,7 +100,6 @@ void setup() throws SQLException, JsonValidationException, IOException { spy(new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService)); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java index ef5391ba428..6f04617a31e 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/PermissionPersistenceTest.java @@ -19,7 +19,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import 
io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -59,7 +58,6 @@ private void setupTestData() throws Exception { final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); final ConfigRepository configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java index 4a41eac7398..d510160e1c3 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StandardSyncPersistenceTest.java @@ -51,7 +51,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; @@ -114,7 +113,6 @@ void beforeEach() throws Exception { new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); configRepository = 
new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java index 7adbf293d46..747b6eea164 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/StatePersistenceTest.java @@ -29,7 +29,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectionServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; @@ -94,7 +93,6 @@ private UUID setupTestData() throws JsonValidationException, IOException { new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); final ConfigRepository configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, @@ -922,7 +920,6 @@ private UUID setupSecondConnection() throws JsonValidationException, IOException new ActorDefinitionVersionUpdater(featureFlagClient, connectionService, actorDefinitionService, scopedConfigurationService); final ConfigRepository configRepository = new ConfigRepository( new 
ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java index c3afd6f8c58..a53c2f4ae9a 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/SyncOperationPersistenceTest.java @@ -21,7 +21,6 @@ import io.airbyte.data.services.OrganizationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -73,7 +72,6 @@ void beforeEach() throws Exception { final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java index 8eb353b8302..1a1ebc373c3 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java +++ 
b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/UserPersistenceTest.java @@ -23,7 +23,6 @@ import io.airbyte.data.services.OrganizationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -65,7 +64,6 @@ void setup() { final ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java index e93b37e89c6..81696ee5c7a 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspaceFilterTest.java @@ -20,7 +20,6 @@ import io.airbyte.data.services.ConnectionService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -142,7 +141,6 @@ void beforeEach() { final 
ActorDefinitionVersionUpdater actorDefinitionVersionUpdater = mock(ActorDefinitionVersionUpdater.class); configRepository = new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java index e21b6cba216..31c1e665dd9 100644 --- a/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java +++ b/airbyte-config/config-persistence/src/test/java/io/airbyte/config/persistence/WorkspacePersistenceTest.java @@ -39,7 +39,6 @@ import io.airbyte.data.services.ScopedConfigurationService; import io.airbyte.data.services.SecretPersistenceConfigService; import io.airbyte.data.services.impls.jooq.ActorDefinitionServiceJooqImpl; -import io.airbyte.data.services.impls.jooq.CatalogServiceJooqImpl; import io.airbyte.data.services.impls.jooq.ConnectorBuilderServiceJooqImpl; import io.airbyte.data.services.impls.jooq.DestinationServiceJooqImpl; import io.airbyte.data.services.impls.jooq.OperationServiceJooqImpl; @@ -94,7 +93,6 @@ void setup() throws Exception { configRepository = spy( new ConfigRepository( new ActorDefinitionServiceJooqImpl(database), - new CatalogServiceJooqImpl(database), connectionService, new ConnectorBuilderServiceJooqImpl(database), new DestinationServiceJooqImpl(database, diff --git a/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java b/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java index 639b7ab8f63..0ce4175b79f 100644 --- a/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java +++ b/airbyte-cron/src/main/java/io/airbyte/cron/config/DatabaseBeanFactory.java @@ -96,7 
+96,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin final WorkspaceService workspaceService) { return new ConfigRepository( actorDefinitionService, - catalogService, connectionService, connectorBuilderService, destinationService, diff --git a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java index 067c949b610..32e5b3d11f6 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java +++ b/airbyte-server/src/main/java/io/airbyte/server/config/DatabaseBeanFactory.java @@ -105,7 +105,6 @@ public ConfigRepository configRepository(final ActorDefinitionService actorDefin final WorkspaceService workspaceService) { return new ConfigRepository( actorDefinitionService, - catalogService, connectionService, connectorBuilderService, destinationService, diff --git a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java b/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java index c72efc8280b..7a2747fca4b 100644 --- a/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java +++ b/airbyte-server/src/test/java/io/airbyte/server/apis/SourceApiTest.java @@ -106,7 +106,8 @@ void testDeleteSource() throws JsonValidationException, ConfigNotFoundException, } @Test - void testDiscoverSchemaForSource() throws JsonValidationException, ConfigNotFoundException, IOException { + void testDiscoverSchemaForSource() + throws JsonValidationException, ConfigNotFoundException, IOException, io.airbyte.data.exceptions.ConfigNotFoundException { Mockito.when(schedulerHandler.discoverSchemaForSourceFromSourceId(Mockito.any())) .thenReturn(new SourceDiscoverSchemaRead()) .thenThrow(new ConfigNotFoundException("", "")); From 4056b961abbf618f5d499d2f953173b8ae774d87 Mon Sep 17 00:00:00 2001 From: Natik Gadzhi Date: Mon, 30 Sep 2024 14:39:13 -0700 Subject: [PATCH 23/36] 
fix(contrib-flow): include pull request column in new connector docs (#14168) --- .../src/main/resources/contribution_templates/docs.md.peb | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/airbyte-connector-builder-server/src/main/resources/contribution_templates/docs.md.peb b/airbyte-connector-builder-server/src/main/resources/contribution_templates/docs.md.peb index 764d85a4d26..0d89cf18805 100644 --- a/airbyte-connector-builder-server/src/main/resources/contribution_templates/docs.md.peb +++ b/airbyte-connector-builder-server/src/main/resources/contribution_templates/docs.md.peb @@ -23,8 +23,8 @@

Expand to review -| Version | Date | Subject | -|------------------|------------|----------------| -| {{ versionTag }} | {{ releaseDate }} | {{ changelogMessage }}| +| Version | Date | Pull Request | Subject | +|------------------|-------------------|--------------|----------------| +| {{ versionTag }} | {{ releaseDate }} | | {{ changelogMessage }} | -
\ No newline at end of file + From 8a6aaeac56e1f6e37f5655ee2a2a160966d524f2 Mon Sep 17 00:00:00 2001 From: Lake Mossman Date: Mon, 30 Sep 2024 15:18:03 -0700 Subject: [PATCH 24/36] chore: remove non-publish references to contribute feature flag (#14163) --- .../ConnectorBuilderProjectTable.tsx | 6 ------ .../connectorBuilder/BaseConnectorInfo.tsx | 6 ------ .../components/ForkConnectorButton.tsx | 14 +++++++------- .../useBuilderCompatibleSourceDefinitions.tsx | 9 ++------- 4 files changed, 9 insertions(+), 26 deletions(-) diff --git a/airbyte-webapp/src/components/ConnectorBuilderProjectTable/ConnectorBuilderProjectTable.tsx b/airbyte-webapp/src/components/ConnectorBuilderProjectTable/ConnectorBuilderProjectTable.tsx index 5717de95809..ec7bc65c7d3 100644 --- a/airbyte-webapp/src/components/ConnectorBuilderProjectTable/ConnectorBuilderProjectTable.tsx +++ b/airbyte-webapp/src/components/ConnectorBuilderProjectTable/ConnectorBuilderProjectTable.tsx @@ -27,7 +27,6 @@ import { ContributionInfo } from "core/api/types/AirbyteClient"; import { Action, Namespace, useAnalyticsService } from "core/services/analytics"; import { useIntent } from "core/utils/rbac"; import { useConfirmationModalService } from "hooks/services/ConfirmationModal"; -import { useExperiment } from "hooks/services/Experiment"; import { useModalService } from "hooks/services/Modal"; import { useNotificationService } from "hooks/services/Notification"; import { getEditPath } from "pages/connectorBuilder/ConnectorBuilderRoutes"; @@ -346,11 +345,6 @@ const ContributionInfoDisplay: React.FC = ({ actorDefinitionId // list instead of fetching definition individually to reuse cached request and avoid 404 for net-new definitions const sourceDefinition = useSourceDefinitionList().sourceDefinitionMap.get(actorDefinitionId); - const isContributeEditsEnabled = useExperiment("connectorBuilder.contributeEditsToMarketplace"); - if (!isContributeEditsEnabled) { - return null; - } - return ( diff --git 
a/airbyte-webapp/src/components/connectorBuilder/BaseConnectorInfo.tsx b/airbyte-webapp/src/components/connectorBuilder/BaseConnectorInfo.tsx index 4095644422a..40b0869e527 100644 --- a/airbyte-webapp/src/components/connectorBuilder/BaseConnectorInfo.tsx +++ b/airbyte-webapp/src/components/connectorBuilder/BaseConnectorInfo.tsx @@ -8,7 +8,6 @@ import { Text } from "components/ui/Text"; import { Tooltip } from "components/ui/Tooltip"; import { BaseActorDefinitionVersionInfo } from "core/api/types/AirbyteClient"; -import { useExperiment } from "hooks/services/Experiment"; import styles from "./BaseConnectorInfo.module.scss"; @@ -27,11 +26,6 @@ export const BaseConnectorInfo: React.FC = ({ name, documentationUrl, }) => { - const isContributeEditsEnabled = useExperiment("connectorBuilder.contributeEditsToMarketplace"); - if (!isContributeEditsEnabled) { - return null; - } - const nameAndVersion = ( { - createForkedProject(sourceDefinition.sourceDefinitionId).then((result) => { - window.open(createProjectEditLink(result.builderProjectId), "_blank"); - }); + createForkedProject(sourceDefinition.sourceDefinitionId).then( + (result: ConnectorBuilderProjectIdWithWorkspaceId) => { + window.open(createProjectEditLink(result.builderProjectId), "_blank"); + } + ); }, [createForkedProject, createProjectEditLink, sourceDefinition]); - const isContributeEditsEnabled = useExperiment("connectorBuilder.contributeEditsToMarketplace"); - if (!sourceDefinition || !isContributeEditsEnabled) { + if (!sourceDefinition) { return null; } diff --git a/airbyte-webapp/src/pages/connectorBuilder/components/useBuilderCompatibleSourceDefinitions.tsx b/airbyte-webapp/src/pages/connectorBuilder/components/useBuilderCompatibleSourceDefinitions.tsx index 0ac50528f9e..6ff73ecc467 100644 --- a/airbyte-webapp/src/pages/connectorBuilder/components/useBuilderCompatibleSourceDefinitions.tsx +++ b/airbyte-webapp/src/pages/connectorBuilder/components/useBuilderCompatibleSourceDefinitions.tsx @@ -2,19 +2,14 @@ 
import { useMemo } from "react"; import { useSourceDefinitionList } from "core/api"; import { SourceDefinitionRead } from "core/api/types/AirbyteClient"; -import { useExperiment } from "hooks/services/Experiment"; import { BUILDER_COMPATIBLE_CONNECTOR_LANGUAGE } from "../../../components/connectorBuilder/types"; export const useBuilderCompatibleSourceDefinitions = () => { - const isContributeEditsEnabled = useExperiment("connectorBuilder.contributeEditsToMarketplace"); const { sourceDefinitions, sourceDefinitionMap } = useSourceDefinitionList(); const builderCompatibleSourceDefinitions = useMemo( - () => - isContributeEditsEnabled - ? sourceDefinitions.filter((sourceDefinition) => isBuilderCompatible(sourceDefinition)) - : [], - [isContributeEditsEnabled, sourceDefinitions] + () => sourceDefinitions.filter((sourceDefinition) => isBuilderCompatible(sourceDefinition)), + [sourceDefinitions] ); return { builderCompatibleSourceDefinitions, sourceDefinitionMap }; From 8318f14c6d131635fa9d4938754d0c8ebce61f31 Mon Sep 17 00:00:00 2001 From: Natik Gadzhi Date: Mon, 30 Sep 2024 16:10:48 -0700 Subject: [PATCH 25/36] feat(contrib-flow): set allowedHosts in metadata.yaml in Builder contributions (#14169) --- .../templates/ContributionTemplates.kt | 23 ++++++- .../connector_builder/utils/ManifestParser.kt | 4 +- .../templates/ContributionTemplatesTest.kt | 60 ++++++++++++++++++- .../src/test/resources/valid_manifest.yaml | 2 +- 4 files changed, 84 insertions(+), 5 deletions(-) diff --git a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt index d1d9a2aa164..10279838cc3 100644 --- a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt +++ 
b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/templates/ContributionTemplates.kt @@ -62,6 +62,24 @@ class ContributionTemplates { } ?: emptyList() } + fun getAllowedHosts(streams: List>): List { + val hostnameRegex = Regex("^(?:https?://)?(?:www\\.)?([^/{}]+)") + + val hosts = + streams.mapNotNull { stream -> + val retriever = stream["retriever"] as? Map + val requester = retriever?.get("requester") as? Map + val baseUrl = requester?.get("url_base") as? String + + baseUrl?.let { url -> + hostnameRegex.find(url)?.groupValues?.getOrNull(1) + } + } + + // Since the requester is on every stream, we only need unique hostnames + return hosts.distinct() + } + /** * Converts a primary key to a string representation. * @@ -133,12 +151,15 @@ class ContributionTemplates { contributionInfo: BuilderContributionInfo, githubContributionService: GithubContributionService, ): String { + val manifestParser = ManifestParser(contributionInfo.manifestYaml) + val allowedHosts = getAllowedHosts(manifestParser.streams) + // TODO: Ensure metadata is correctly formatted // TODO: Merge metadata with existing metadata if it exists val context = mapOf( // TODO: Parse Allowed Hosts from manifest - "allowedHosts" to listOf("*"), + "allowedHosts" to allowedHosts, "connectorImageName" to contributionInfo.connectorImageName, "baseImage" to contributionInfo.baseImage, "actorDefinitionId" to contributionInfo.actorDefinitionId, diff --git a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/ManifestParser.kt b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/ManifestParser.kt index 03560d3614f..2f76d2851ff 100644 --- a/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/ManifestParser.kt +++ b/airbyte-connector-builder-server/src/main/kotlin/io/airbyte/connector_builder/utils/ManifestParser.kt @@ -49,13 +49,13 @@ class ManifestParser(rawManifestYaml: String) { /** * 
Removes escape quote characters from the serialized string - * e.g. \\" -> " + * e.g. \\" -> ' * * Note: This is due to the way the yaml in the FE library serializes escaped strings */ private fun unEscapeQuotes(serializedString: String): String { // Handle escaped quotes in the string - // \\" -> " + // \\" -> ' return serializedString.replace("\\\\\"", "\"") } diff --git a/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt b/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt index 0b775de0566..3088e40cd32 100644 --- a/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt +++ b/airbyte-connector-builder-server/src/test/kotlin/io/airbyte/connector_builder/templates/ContributionTemplatesTest.kt @@ -189,6 +189,64 @@ class ContributionTemplatesTest { assertEquals(contributionTemplates.toTemplateSpecProperties(spec), expectedSpecProperties) } + @Test + fun `test getAllowedHosts`() { + val contributionTemplates = ContributionTemplates() + + val streams = + listOf( + mapOf( + "name" to "stream1", + "retriever" to + mapOf( + "requester" to + mapOf( + "url_base" to "https://api1.example.com/v1/", + ), + ), + ), + mapOf( + "name" to "stream2", + "retriever" to + mapOf( + "requester" to + mapOf( + "url_base" to "http://api2.example.com/v2/{{param}}", + ), + ), + ), + mapOf( + "name" to "stream3", + "retriever" to + mapOf( + "requester" to + mapOf( + "url_base" to "https://api1.example.com/v3/", + ), + ), + ), + mapOf( + "name" to "stream5", + "retriever" to + mapOf( + "requester" to + mapOf( + "url_base" to "https://www.another-api.com/v1/", + ), + ), + ), + ) + + val expectedHosts = + listOf( + "api1.example.com", + "api2.example.com", + "another-api.com", + ) + + assertEquals(expectedHosts, contributionTemplates.getAllowedHosts(streams)) + } + @Test fun `test toTemplateStreams`() { 
val contributionTemplates = ContributionTemplates() @@ -252,7 +310,7 @@ class ContributionTemplatesTest { |data: | allowedHosts: | hosts: - | - "*" + | - "api.whatahost.com" | registryOverrides: | oss: | enabled: true diff --git a/airbyte-connector-builder-server/src/test/resources/valid_manifest.yaml b/airbyte-connector-builder-server/src/test/resources/valid_manifest.yaml index 669cf03e5ba..b7a2e1edf84 100644 --- a/airbyte-connector-builder-server/src/test/resources/valid_manifest.yaml +++ b/airbyte-connector-builder-server/src/test/resources/valid_manifest.yaml @@ -168,7 +168,7 @@ definitions: schema: $ref: "#/schemas/collections_items_dashboards" base_requester: - url_base: "{{ config['instance_api_url'] }}" + url_base: "api.whatahost.com/{{ config['instance_api_url'] }}/" http_method: "GET" authenticator: type: "LegacySessionTokenAuthenticator" From 1549bc7ebe280aba7f912ae26d9fc81264c103d4 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Tue, 1 Oct 2024 09:51:36 -0400 Subject: [PATCH 26/36] revert: "revert: "refactor: connector rollout api endpoint updates (#14145)" (#14173)" (#14176) --- .../server-api/src/main/openapi/config.yaml | 30 +++++++++++++------ .../handlers/ConnectorRolloutHandler.kt | 8 +++++ .../handlers/ConnectorRolloutHandlerTest.kt | 15 +++++----- .../rollout/client/ConnectorRolloutCLI.kt | 6 ++++ .../ConnectorRolloutActivityInputFinalize.kt | 1 + .../ConnectorRolloutActivityInputRollout.kt | 1 + .../ConnectorRolloutActivityInputStart.kt | 1 + .../activities/DoRolloutActivityImpl.kt | 1 + .../activities/FinalizeRolloutActivityImpl.kt | 1 + .../activities/StartRolloutActivityImpl.kt | 1 + .../apis/ConnectorRolloutApiController.java | 3 +- .../src/main/resources/application.yml | 4 +-- 12 files changed, 52 insertions(+), 20 deletions(-) diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml b/airbyte-api/server-api/src/main/openapi/config.yaml index 756e55ec0b2..e15410859c1 100644 --- 
a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -5056,12 +5056,6 @@ paths: # Connector Rollouts /v1/connector_rollout/list_all: post: - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/ConnectorRolloutListAllRequestBody" - responses: "200": content: @@ -7368,9 +7362,6 @@ components: type: string format: uuid - ConnectorRolloutListAllRequestBody: - type: object - ConnectorRolloutListByActorDefinitionIdRequestBody: type: object required: @@ -7461,6 +7452,9 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + updated_by: + type: string + format: uuid ConnectorRolloutStartResponse: type: object @@ -7486,6 +7480,9 @@ components: $ref: "#/components/schemas/ActorId" rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + updated_by: + type: string + format: uuid ConnectorRolloutResponse: type: object @@ -7513,6 +7510,9 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" + updated_by: + type: string + format: uuid ConnectorRolloutFinalizeResponse: type: object @@ -7528,6 +7528,7 @@ components: - docker_repository - docker_image_tag - actor_definition_id + - updated_by properties: docker_repository: type: string @@ -7536,6 +7537,9 @@ components: actor_definition_id: type: string format: uuid + updated_by: + type: string + format: uuid ConnectorRolloutManualRolloutRequestBody: type: object @@ -7544,6 +7548,7 @@ components: - docker_image_tag - actor_definition_id - id + - updated_by - actor_ids properties: docker_repository: @@ -7556,6 +7561,9 @@ components: id: type: string format: uuid + updated_by: + type: string + format: uuid actor_ids: type: array items: @@ -7568,6 +7576,7 @@ components: - docker_image_tag - actor_definition_id - id + - updated_by - state properties: docker_repository: @@ -7580,6 +7589,9 @@ components: id: type: string format: uuid + 
updated_by: + type: string + format: uuid state: $ref: "#/components/schemas/ConnectorRolloutStateTerminal" error_msg: diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt index b2948843970..98a2d76c2b3 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt @@ -70,6 +70,11 @@ open class ConnectorRolloutHandler .hasBreakingChanges(connectorRollout.hasBreakingChanges) .rolloutStrategy(rolloutStrategy) .maxStepWaitTimeMins(connectorRollout.maxStepWaitTimeMins?.toInt()) + .updatedAt(connectorRollout.updatedAt?.let { unixTimestampToOffsetDateTime(it) }) + .createdAt(connectorRollout.createdAt?.let { unixTimestampToOffsetDateTime(it) }) + .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) + .errorMsg(connectorRollout.errorMsg) + .failedReason(connectorRollout.failedReason) .updatedBy( connectorRollout.rolloutStrategy?.let { strategy -> connectorRollout.updatedBy?.let { updatedBy -> @@ -150,6 +155,7 @@ open class ConnectorRolloutHandler dockerRepository: String, actorDefinitionId: UUID, dockerImageTag: String, + updatedBy: UUID, ): ConnectorRollout { val actorDefinitionVersion = actorDefinitionService.getActorDefinitionVersion( @@ -185,6 +191,7 @@ open class ConnectorRolloutHandler .withActorDefinitionId(actorDefinitionId) .withReleaseCandidateVersionId(actorDefinitionVersion.get().versionId) .withInitialVersionId(initialVersion.get().versionId) + .withUpdatedBy(updatedBy) .withState(ConnectorEnumRolloutState.INITIALIZED) .withHasBreakingChanges(false) connectorRolloutService.writeConnectorRollout(connectorRollout) @@ -358,6 +365,7 @@ open class ConnectorRolloutHandler connectorRolloutWorkflowStart.dockerRepository, 
connectorRolloutWorkflowStart.actorDefinitionId, connectorRolloutWorkflowStart.dockerImageTag, + connectorRolloutWorkflowStart.updatedBy, ) connectorRolloutClient.startWorkflow( ConnectorRolloutActivityInputStart( diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt index b2598992674..338e62ce606 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt +++ b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt @@ -85,6 +85,7 @@ internal class ConnectorRolloutHandlerTest { val DOCKER_IMAGE_TAG = "0.1" val ACTOR_DEFINITION_ID = UUID.randomUUID() val RELEASE_CANDIDATE_VERSION_ID = UUID.randomUUID() + val UPDATED_BY = UUID.randomUUID() @JvmStatic fun validInsertStates() = listOf(ConnectorEnumRolloutState.CANCELED_ROLLED_BACK) @@ -607,6 +608,7 @@ internal class ConnectorRolloutHandlerTest { dockerRepository = DOCKER_REPOSITORY dockerImageTag = DOCKER_IMAGE_TAG actorDefinitionId = ACTOR_DEFINITION_ID + updatedBy = UPDATED_BY } val connectorRollout = createMockConnectorRollout(rolloutId) @@ -693,7 +695,6 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput updates rollout when already exists in INITIALIZED state`() { val rolloutId = UUID.randomUUID() - val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() @@ -709,7 +710,7 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(any()) } returns Optional.of(createMockActorDefinitionVersion()) - val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, actorDefinitionId, 
dockerImageTag) + val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, actorDefinitionId, dockerImageTag, UPDATED_BY) assertEquals(connectorRollout.id, result.id) verifyAll { @@ -722,7 +723,6 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput throws when initial version is not found`() { val rolloutId = UUID.randomUUID() - val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() @@ -740,9 +740,10 @@ internal class ConnectorRolloutHandlerTest { assertThrows { connectorRolloutHandler.getOrCreateAndValidateManualStartInput( - dockerRepository, + DOCKER_REPOSITORY, actorDefinitionId, dockerImageTag, + UPDATED_BY, ) } } @@ -762,7 +763,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(ACTOR_DEFINITION_ID) } returns Optional.of(actorDefinitionVersion) every { connectorRolloutService.writeConnectorRollout(any()) } returns connectorRollout - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) verifyAll { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) @@ -777,7 +778,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.empty() assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) } verify { 
actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } @@ -797,7 +798,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.of(actorDefinitionVersion) assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) } verifyAll { diff --git a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt index 7ab12f65342..8806170e8f2 100644 --- a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt +++ b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt @@ -111,6 +111,7 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, + UUID(0, 0), ) startWorkflow(rolloutClient, startInput) } @@ -132,6 +133,7 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, + UUID(0, 0), rolloutId!!, actorIds!!, ) @@ -144,6 +146,7 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, + UUID(0, 0), ConnectorRolloutStateTerminal.valueOf(ConnectorRolloutFinalState.SUCCEEDED.toString()), null, null, @@ -157,6 +160,7 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, + UUID(0, 0), ConnectorRolloutStateTerminal.FAILED_ROLLED_BACK, null, null, @@ -170,6 +174,7 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, + UUID(0, 0), ConnectorRolloutStateTerminal.CANCELED_ROLLED_BACK, null, null, @@ 
-187,6 +192,7 @@ class ConnectorRolloutCLI : Runnable { client: ConnectorRolloutApi, input: ConnectorRolloutManualStartRequestBody, ) { + logFormatted("CLI.startWorkflow using client", client) logFormatted("CLI.startWorkflow with input", input) logFormatted("CLI Rollout workflows status", client.manualStartConnectorRollout(input)) } diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt index 9cea8ec7dcf..2e59ea2c3b7 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt @@ -11,4 +11,5 @@ data class ConnectorRolloutActivityInputFinalize( var result: ConnectorRolloutFinalState, var errorMsg: String? = null, var failedReason: String? = null, + var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt index 4d18042bb85..2b928988d05 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt @@ -8,4 +8,5 @@ data class ConnectorRolloutActivityInputRollout( var actorDefinitionId: UUID, var rolloutId: UUID, var actorIds: List, + var updatedBy: UUID? 
= null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt index e2fafe9e3cc..6caa7e66bab 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt @@ -7,4 +7,5 @@ data class ConnectorRolloutActivityInputStart( var dockerImageTag: String, var actorDefinitionId: UUID, var rolloutId: UUID, + var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt index 71829ab1416..6c907eca037 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt @@ -37,6 +37,7 @@ class DoRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : Do input.rolloutId, input.actorIds, ConnectorRolloutStrategy.MANUAL, + input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt index ebf5798b749..973ebfc87b7 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt +++ 
b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt @@ -47,6 +47,7 @@ class FinalizeRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient ConnectorRolloutStrategy.MANUAL, errorMsg, failureReason, + input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt index 5088da35dcf..4297b37b261 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt @@ -38,6 +38,7 @@ class StartRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : input.rolloutId, workflowRunId, ConnectorRolloutStrategy.MANUAL, + input.updatedBy, ) return try { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java index 7dced66f8ad..e7b6c24e32f 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java @@ -34,7 +34,6 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; -import jakarta.validation.Valid; import java.util.UUID; @Controller("/api/v1/connector_rollout") @@ -133,7 +132,7 @@ public ConnectorRolloutListResponse getConnectorRolloutsList(@Body final Connect @Secured({ADMIN}) @ExecuteOn(AirbyteTaskExecutors.IO) @Override - public ConnectorRolloutListResponse getConnectorRolloutsListAll(@Valid 
Object body) { + public ConnectorRolloutListResponse getConnectorRolloutsListAll() { return ApiHelper.execute(() -> { final var connectorRollouts = connectorRolloutHandler.listConnectorRollouts(); return new ConnectorRolloutListResponse().connectorRollouts(connectorRollouts); diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index ded8be3a392..d1fee022f5d 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -349,8 +349,8 @@ temporal: enabled: ${TEMPORAL_CLOUD_ENABLED:false} host: ${TEMPORAL_CLOUD_HOST:} namespace: ${TEMPORAL_CLOUD_NAMESPACE:} - connectorRollout: - namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:connector-rollout-stage.ebc2e} + connector-rollout: + namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:} host: ${TEMPORAL_HOST:`airbyte-temporal:7233`} retention: ${TEMPORAL_HISTORY_RETENTION_IN_DAYS:30} sdk: From e1f5aace2362f5240357944c98bdb52fa30ce66e Mon Sep 17 00:00:00 2001 From: Teal Larson Date: Tue, 1 Oct 2024 09:59:02 -0400 Subject: [PATCH 27/36] feat: allow viewing bytes or records in status page table (#14118) --- .../utils/useUiStreamsStates.test.ts | 334 ++++++++---------- .../connection/utils/useUiStreamsStates.ts | 7 +- .../src/core/utils/numberHelper.test.tsx | 12 +- airbyte-webapp/src/locales/en.json | 12 +- .../StreamStatusPage/LatestSyncCell.test.tsx | 101 +++++- .../StreamStatusPage/LatestSyncCell.tsx | 65 +++- .../StreamStatusPage/StreamsList.module.scss | 20 +- .../StreamStatusPage/StreamsList.tsx | 52 ++- 8 files changed, 360 insertions(+), 243 deletions(-) diff --git a/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.test.ts b/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.test.ts index c47b7ef61b2..af987199034 100644 --- a/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.test.ts +++ b/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.test.ts 
@@ -1,4 +1,4 @@ -import { QueryClient, useQueryClient } from "@tanstack/react-query"; +import { useQueryClient } from "@tanstack/react-query"; import { act, renderHook } from "@testing-library/react"; import { useConnectionStatus } from "components/connection/ConnectionStatus/useConnectionStatus"; @@ -13,7 +13,7 @@ import { useStreamsListContext } from "pages/connections/StreamStatusPage/Stream import { useHistoricalStreamData } from "./useStreamsHistoricalData"; import { useStreamsStatuses } from "./useStreamsStatuses"; import { useStreamsSyncProgress } from "./useStreamsSyncProgress"; -import { useUiStreamStates } from "./useUiStreamsStates"; +import { RateLimitedUIStreamState, useUiStreamStates } from "./useUiStreamsStates"; jest.mock("components/connection/ConnectionStatus/useConnectionStatus"); jest.mock("core/api"); @@ -26,228 +26,172 @@ jest.mock("@tanstack/react-query", () => ({ useQueryClient: jest.fn(), })); -describe("useUiStreamStates", () => { - const mockConnectionId = "test-connection-id"; - - const mockQueryClient = new QueryClient(); - const mockInvalidateQueries = jest.fn(); - mockQueryClient.invalidateQueries = mockInvalidateQueries; - - (useQueryClient as jest.Mock).mockReturnValue(mockQueryClient); - - const mockConnectionStatus = { - status: ConnectionStatusType.Pending, - isRunning: false, - }; +const mockConnectionId = "test-connection-id"; +const mockStreamSyncProgress = new Map([ + [ + "stream1-namespace1", + { + recordsEmitted: 1000, + recordsCommitted: 950, + bytesEmitted: 10200, + bytesCommitted: 9540, + configType: "sync", + }, + ], +]); - const mockStreamStatus = new Map([ - [ - "stream1-namespace1", - { - status: StreamStatusType.Synced, - relevantHistory: [], - lastSuccessfulSyncAt: 12345, - }, - ], - ]); - - const mockSyncProgress = new Map([ - [ - "stream1-namespace1", - { - recordsEmitted: 1000, - recordsCommitted: 950, - bytesEmitted: 10200, - bytesCommitted: 9540, - configType: "sync", - }, - ], - ]); - - const 
mockHistoricalData = new Map([ - [ - "stream1-namespace1", - { - recordsEmitted: 1000, - recordsCommitted: 950, - bytesEmitted: 10200, - bytesCommitted: 9540, - configType: "sync", - }, - ], - ]); - - const mockFilteredStreams = [ +const mockHistoricalData = new Map([ + [ + "stream1-namespace1", { - streamName: "stream1", - streamNamespace: "namespace1", + recordsEmitted: 1200, + recordsCommitted: 1200, + bytesEmitted: 10200, + bytesCommitted: 10200, + configType: "sync", }, - ]; + ], +]); + +const mockFilteredStreams = [ + { + streamName: "stream1", + streamNamespace: "namespace1", + }, +]; +describe("useUiStreamStates", () => { beforeEach(() => { jest.clearAllMocks(); - - (useConnectionStatus as jest.Mock).mockReturnValue(mockConnectionStatus); - (useGetConnectionSyncProgress as jest.Mock).mockReturnValue({ data: { jobId: 1 } }); (useStreamsListContext as jest.Mock).mockReturnValue({ filteredStreamsByName: mockFilteredStreams }); - (useHistoricalStreamData as jest.Mock).mockReturnValue({ - historicalStreamsData: mockHistoricalData, - isFetching: false, - }); - (useStreamsStatuses as jest.Mock).mockReturnValue({ streamStatuses: mockStreamStatus }); - (useStreamsSyncProgress as jest.Mock).mockReturnValue(mockSyncProgress); }); - describe("No running sync", () => { - it("should return correct UIStreamState when no syncs have been run", () => { - (useStreamsSyncProgress as jest.Mock).mockReturnValueOnce(new Map()); + it.each` + description | connectionStatus | historicalStreamsData | syncProgress | streamSyncProgress | streamStatuses | expectedRecordsExtracted | expectedRecordsLoaded | expectedBytesExtracted | expectedBytesLoaded | expectedStatus | expectedIsLoadingHistoricalData | expectedDataFreshAsOf + ${"not running, no historical data"} | ${{ status: ConnectionStatusType.Pending, isRunning: false }} | ${new Map()} | ${new Map()} | ${new Map()} | ${new Map([["stream1-namespace1", { status: StreamStatusType.Pending }]])} | ${undefined} | ${undefined} | 
${undefined} | ${undefined} | ${StreamStatusType.Pending} | ${false} | ${undefined} + ${"not running, with historical data"} | ${{ status: ConnectionStatusType.Synced, isRunning: false }} | ${mockHistoricalData} | ${new Map()} | ${new Map()} | ${new Map([["stream1-namespace1", { status: StreamStatusType.Synced, relevantHistory: [], lastSuccessfulSyncAt: 12345 }]])} | ${undefined} | ${1200} | ${undefined} | ${10200} | ${StreamStatusType.Synced} | ${false} | ${12345} + ${"sync running, no historical data"} | ${{ status: ConnectionStatusType.Syncing, isRunning: true }} | ${new Map()} | ${{ activeSyncJobId: "active-job" }} | ${mockStreamSyncProgress} | ${new Map([["stream1-namespace1", { status: StreamStatusType.Syncing, relevantHistory: [] }]])} | ${1000} | ${950} | ${10200} | ${9540} | ${StreamStatusType.Syncing} | ${false} | ${undefined} + ${"sync running, with historical data"} | ${{ status: ConnectionStatusType.Syncing, isRunning: true }} | ${mockHistoricalData} | ${{ activeSyncJobId: "active-job" }} | ${mockStreamSyncProgress} | ${new Map([["stream1-namespace1", { status: StreamStatusType.Syncing, relevantHistory: [], lastSuccessfulSyncAt: 12345 }]])} | ${1000} | ${950} | ${10200} | ${9540} | ${StreamStatusType.Syncing} | ${false} | ${undefined} + `( + "$description", + async ({ + connectionStatus, + historicalStreamsData, + syncProgress, + streamSyncProgress, + streamStatuses, + expectedRecordsExtracted, + expectedRecordsLoaded, + expectedBytesExtracted, + expectedBytesLoaded, + expectedStatus, + expectedIsLoadingHistoricalData, + expectedDataFreshAsOf, + }) => { + (useConnectionStatus as jest.Mock).mockReturnValue(connectionStatus); + (useGetConnectionSyncProgress as jest.Mock).mockReturnValue(syncProgress); + (useStreamsSyncProgress as jest.Mock).mockReturnValue(streamSyncProgress); + (useStreamsStatuses as jest.Mock).mockReturnValue({ streamStatuses }); (useHistoricalStreamData as jest.Mock).mockReturnValue({ - historicalStreamsData: new Map(), - isFetching: 
false, - }); - (useStreamsStatuses as jest.Mock).mockReturnValue({ streamStatuses: new Map() }); - - const { result } = renderHook(() => useUiStreamStates(mockConnectionId), { - wrapper: TestWrapper, + historicalStreamsData, + isFetching: expectedIsLoadingHistoricalData, }); + const { result } = renderHook(() => useUiStreamStates(mockConnectionId), { wrapper: TestWrapper }); const uiStreamStates = result.current; - expect(uiStreamStates).toHaveLength(1); - const uiStreamState = uiStreamStates[0]; - - expect(uiStreamState.streamName).toBe("stream1"); - expect(uiStreamState.streamNamespace).toBe("namespace1"); - expect(uiStreamState.recordsExtracted).toBeUndefined(); - expect(uiStreamState.recordsLoaded).toBeUndefined(); - expect(uiStreamState.bytesLoaded).toBeUndefined(); - expect(uiStreamState.status).toBe(StreamStatusType.Pending); - expect(uiStreamState.dataFreshAsOf).toBeUndefined(); - }); - - it("should return correct UIStreamState when historical data is present", () => { - const { result } = renderHook(() => useUiStreamStates(mockConnectionId), { - wrapper: TestWrapper, - }); - - const uiStreamStates = result.current; + expect(uiStreamStates[0].recordsExtracted).toBe(expectedRecordsExtracted); + expect(uiStreamStates[0].recordsLoaded).toBe(expectedRecordsLoaded); + expect(uiStreamStates[0].bytesExtracted).toBe(expectedBytesExtracted); + expect(uiStreamStates[0].bytesLoaded).toBe(expectedBytesLoaded); + expect(uiStreamStates[0].status).toBe(expectedStatus); + expect(uiStreamStates[0].isLoadingHistoricalData).toBe(expectedIsLoadingHistoricalData); + expect(uiStreamStates[0].dataFreshAsOf).toBe(expectedDataFreshAsOf); + } + ); +}); - expect(uiStreamStates).toHaveLength(1); - const uiStreamState = uiStreamStates[0]; - - expect(uiStreamState.streamName).toBe("stream1"); - expect(uiStreamState.streamNamespace).toBe("namespace1"); - expect(uiStreamState.recordsExtracted).toBe(1000); - expect(uiStreamState.recordsLoaded).toBe(950); - 
expect(uiStreamState.bytesLoaded).toBeUndefined(); - expect(uiStreamState.status).toBe(StreamStatusType.Synced); - expect(uiStreamState.dataFreshAsOf).toBeUndefined(); - }); - }); - describe("During running sync", () => { - it("should return correct UIStreamState for initial sync", () => { - (useHistoricalStreamData as jest.Mock).mockReturnValue({ - historicalStreamsData: new Map(), - isFetching: false, - }); - (useStreamsStatuses as jest.Mock).mockReturnValue({ - streamStatuses: new Map([ - [ - "stream1-namespace1", +it("should handle RateLimited status", () => { + (useStreamsStatuses as jest.Mock).mockReturnValue({ + streamStatuses: new Map([ + [ + "stream1-namespace1", + { + status: StreamStatusType.RateLimited, + relevantHistory: [ { - status: StreamStatusType.Syncing, - relevantHistory: [], - lastSuccessfulSyncAt: 12345, + jobType: StreamStatusJobType.SYNC, + runState: StreamStatusRunState.COMPLETE, + metadata: { quotaReset: 1234567890 }, }, ], - ]), - }); - - const { result } = renderHook(() => useUiStreamStates(mockConnectionId), { - wrapper: TestWrapper, - }); - - const uiStreamStates = result.current; - - expect(uiStreamStates).toHaveLength(1); - const uiStreamState = uiStreamStates[0]; - - expect(uiStreamState.streamName).toBe("stream1"); - expect(uiStreamState.streamNamespace).toBe("namespace1"); - expect(uiStreamState.recordsExtracted).toBe(1000); - expect(uiStreamState.recordsLoaded).toBe(950); - expect(uiStreamState.bytesLoaded).toBeUndefined(); - expect(uiStreamState.status).toBe(StreamStatusType.Syncing); - expect(uiStreamState.dataFreshAsOf).toBeUndefined(); - }); + }, + ], + ]), }); - it("should correctly set isLoadingHistoricalData flag", () => { - (useHistoricalStreamData as jest.Mock).mockReturnValueOnce({ - historicalStreamsData: mockHistoricalData, - isFetching: true, - }); + const { result } = renderHook(() => useUiStreamStates(mockConnectionId), { + wrapper: TestWrapper, + }); - const { result } = renderHook(() => 
useUiStreamStates(mockConnectionId), { - wrapper: TestWrapper, - }); + const uiStreamStates = result.current as RateLimitedUIStreamState[]; - const uiStreamStates = result.current; + expect(uiStreamStates).toHaveLength(1); + expect(uiStreamStates[0].status).toBe(StreamStatusType.RateLimited); + expect(uiStreamStates[0].quotaReset).toBe(1234567890); +}); - expect(uiStreamStates).toHaveLength(1); - expect(uiStreamStates[0].isLoadingHistoricalData).toBe(true); +it("should handle post-job fetching correctly", async () => { + const mockInvalidateQueries = jest.fn(); + const mockQueryClient = { + invalidateQueries: mockInvalidateQueries, + }; + (useConnectionStatus as jest.Mock).mockReturnValueOnce({ + status: ConnectionStatusType.Syncing, + isRunning: true, }); + (useQueryClient as jest.Mock).mockReturnValue(mockQueryClient); + (useStreamsListContext as jest.Mock).mockReturnValue({ filteredStreamsByName: mockFilteredStreams }); - it("should handle RateLimited status", () => { - (useStreamsStatuses as jest.Mock).mockReturnValue({ - streamStatuses: new Map([ - [ - "stream1-namespace1", - { - status: StreamStatusType.RateLimited, - relevantHistory: [ - { - jobType: StreamStatusJobType.SYNC, - runState: StreamStatusRunState.COMPLETE, - metadata: { quotaReset: 1234567890 }, - }, - ], - }, - ], - ]), - }); - - const { result } = renderHook(() => useUiStreamStates(mockConnectionId), { - wrapper: TestWrapper, - }); - - const uiStreamStates = result.current; - - expect(uiStreamStates).toHaveLength(1); - expect(uiStreamStates[0].status).toBe(StreamStatusType.RateLimited); + (useGetConnectionSyncProgress as jest.Mock).mockReturnValue(new Map()); + (useStreamsSyncProgress as jest.Mock).mockReturnValue(new Map()); + (useHistoricalStreamData as jest.Mock).mockReturnValue({ + historicalStreamsData: new Map(), + isFetching: false, }); - it("should handle post-job fetching correctly", async () => { - (useConnectionStatus as jest.Mock).mockReturnValueOnce({ - ...mockConnectionStatus, - 
isRunning: true, - }); - - const { rerender } = renderHook(() => useUiStreamStates(mockConnectionId)); + (useStreamsStatuses as jest.Mock).mockReturnValue({ + streamStatuses: new Map([ + [ + "stream1-namespace1", + { + status: StreamStatusType.RateLimited, + relevantHistory: [ + { + jobType: StreamStatusJobType.SYNC, + runState: StreamStatusRunState.COMPLETE, + metadata: { quotaReset: 1234567890 }, + }, + ], + }, + ], + ]), + }); - // Simulate job completion by updating the connection status - (useConnectionStatus as jest.Mock).mockReturnValueOnce({ - ...mockConnectionStatus, - isRunning: false, - }); + const { rerender } = renderHook(() => useUiStreamStates(mockConnectionId)); - await act(async () => { - rerender(); - await new Promise((resolve) => setTimeout(resolve, 0)); // Wait for next tick - }); + // Simulate job completion by updating the connection status + (useConnectionStatus as jest.Mock).mockReturnValue({ + status: ConnectionStatusType.Synced, + isRunning: false, + }); - expect(mockInvalidateQueries).toHaveBeenCalledWith(connectionsKeys.lastJobPerStream(mockConnectionId)); - expect(mockInvalidateQueries).toHaveBeenCalledWith(connectionsKeys.uptimeHistory(mockConnectionId)); - expect(mockInvalidateQueries).toHaveBeenCalledWith(connectionsKeys.dataHistory(mockConnectionId)); + await act(async () => { + rerender(); + await new Promise((resolve) => setTimeout(resolve, 0)); // Wait for next tick }); + + expect(mockInvalidateQueries).toHaveBeenCalledWith(connectionsKeys.lastJobPerStream(mockConnectionId)); + expect(mockInvalidateQueries).toHaveBeenCalledWith(connectionsKeys.uptimeHistory(mockConnectionId)); + expect(mockInvalidateQueries).toHaveBeenCalledWith(connectionsKeys.dataHistory(mockConnectionId)); }); diff --git a/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts b/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts index 82918799733..5732cc8bf4d 100644 --- a/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts +++ 
b/airbyte-webapp/src/area/connection/utils/useUiStreamsStates.ts @@ -28,6 +28,7 @@ interface BaseUIStreamState { dataFreshAsOf?: number; recordsExtracted?: number; recordsLoaded?: number; + bytesExtracted?: number; bytesLoaded?: number; status: Exclude; isLoadingHistoricalData: boolean; @@ -86,6 +87,7 @@ export const useUiStreamStates = (connectionId: string): UIStreamState[] => { dataFreshAsOf: undefined, recordsExtracted: undefined, recordsLoaded: undefined, + bytesExtracted: undefined, bytesLoaded: undefined, status: StreamStatusType.Pending as StreamStatusType, // cast so TS keeps the wider UIStreamState union instead of narrowing to BaseUIStreamState isLoadingHistoricalData, @@ -102,11 +104,12 @@ export const useUiStreamStates = (connectionId: string): UIStreamState[] => { uiState.quotaReset = streamStatus.relevantHistory.at(0)?.metadata?.quotaReset; } } - // only pull from syncProgress OR historicalData for the latestSync related data if (syncProgressItem) { // also, for clear jobs, we should not show anything in this column uiState.recordsExtracted = syncProgressItem.recordsEmitted; + uiState.bytesExtracted = syncProgressItem.bytesEmitted; + uiState.bytesLoaded = syncProgressItem.bytesCommitted; uiState.recordsLoaded = syncProgressItem.recordsCommitted; uiState.activeJobStartedAt = currentJobId === streamStatus?.relevantHistory[0]?.jobId @@ -123,7 +126,6 @@ export const useUiStreamStates = (connectionId: string): UIStreamState[] => { const lastSuccessfulSync = streamStatus?.relevantHistory?.find( (status) => status.jobType === StreamStatusJobType.SYNC && status.runState === StreamStatusRunState.COMPLETE ); - uiState.dataFreshAsOf = // has the stream successfully cleared since it successfully synced? 
then it's not fresh // note: refresh jobs will register as StreamStatusJobType.SYNC, so this includes them (which it should) @@ -133,6 +135,5 @@ export const useUiStreamStates = (connectionId: string): UIStreamState[] => { return uiState; }); - return uiStreamStates; }; diff --git a/airbyte-webapp/src/core/utils/numberHelper.test.tsx b/airbyte-webapp/src/core/utils/numberHelper.test.tsx index 7aa78f4c2c5..f6204e5ac42 100644 --- a/airbyte-webapp/src/core/utils/numberHelper.test.tsx +++ b/airbyte-webapp/src/core/utils/numberHelper.test.tsx @@ -13,12 +13,12 @@ const _render = (args: Parameters) => describe("#formatBytes", () => { const cases: Array<[number | undefined, string]> = [ - [undefined, "0 Bytes"], - [0, "0 Bytes"], - [-1, "0 Bytes"], - [12, "12 Bytes"], - [1024 * 1 + 1, "1 KB"], - [1024 * 10 + 1, "10 KB"], + [undefined, "0 bytes"], + [0, "0 bytes"], + [-1, "0 bytes"], + [12, "12 bytes"], + [1024 * 1 + 1, "1 kB"], + [1024 * 10 + 1, "10 kB"], [1024 * 1024 + 1, "1 MB"], [1024 * 1024 * 10 + 1, "10 MB"], [1024 * 1024 * 1024 + 1, "1 GB"], diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index deba17513c5..4c9e347667b 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -496,11 +496,13 @@ "sources.countRecordsLoaded": "{count, plural, =0 {no records loaded} one {# record loaded} other {# records loaded}}", "sources.countLoaded": "{count, number} loaded", "sources.countExtracted": "{count, number} extracted", - "sources.countBytes": "{count, plural, =0 {0 Bytes} one {# Byte} other {# Bytes}}", - "sources.countKB": "{count} KB", + "sources.countBytes": "{count, plural, =0 {0 bytes} one {# byte} other {# bytes}}", + "sources.countKB": "{count} kB", "sources.countMB": "{count} MB", "sources.countGB": "{count} GB", "sources.countTB": "{count} TB", + "sources.bytesLoaded": "{count} loaded", + "sources.bytesExtracted": "{count} extracted", "sources.syncing": "Syncing", "sources.starting": 
"Starting…", "sources.sumOverAttempts": "Sum over attempts", @@ -830,7 +832,11 @@ "connection.stream.status.table.dataFreshAsOf.relative": "Show relative time", "connection.stream.status.table.dataFreshAsOf.absolute": "Show absolute time", "connection.stream.status.table.lastSync": "Last sync", - "connection.stream.status.table.latestSync": "Latest sync", + "connection.stream.status.table.latestSync.showBytes": "Show bytes synced", + "connection.stream.status.table.latestSync.showRecords": "Show records synced", + "connection.stream.status.table.latestSync": "Latest sync in {denomination}", + "connection.stream.status.table.latestSync.bytes": "bytes", + "connection.stream.status.table.latestSync.records": "records", "connection.stream.status.table.lastRecord": "Last record loaded", "connection.stream.status.table.emptyTable.message": "Re-enable the connection to show stream sync progress", "connection.stream.status.table.emptyTable.callToAction": "Re-enable", diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.test.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.test.tsx index e797d46d6e8..1807a2dddf4 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.test.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.test.tsx @@ -14,13 +14,32 @@ describe("LastSyncCell", () => { status={StreamStatusType.Synced} recordsLoaded={1000} recordsExtracted={1000} + bytesLoaded={1024 * 1024 * 1024} + bytesExtracted={1024 * 1024 * 1024} syncStartedAt={BASE_TIME - 1000} isLoadingHistoricalData={false} + showBytes={false} /> ); expect(result.container.textContent).toBe("1,000 loaded"); }); + it("past sync showing bytes", async () => { + const result = await render( + + ); + + expect(result.container.textContent).toBe("1 GB loaded"); + }); it("past sync without recordsLoaded", async () => { const result = await render( @@ -28,22 +47,60 @@ describe("LastSyncCell", () => { 
status={StreamStatusType.Synced} recordsLoaded={undefined} recordsExtracted={undefined} + bytesLoaded={undefined} + bytesExtracted={undefined} syncStartedAt={BASE_TIME - 1000} isLoadingHistoricalData={false} + showBytes={false} /> ); expect(result.container.textContent).toBe("-"); }); + it("past sync with nothing loaded but showing bytes", async () => { + const result = await render( + + ); + + expect(result.container.textContent).toBe("-"); + }); + + it("extracted == 0 && loaded == 0, show bytes false", async () => { + const result = await render( + + ); - it("extracted == 0 && loaded == 0", async () => { + expect(result.container.textContent).toBe("Starting… | 1m elapsed"); + }); + it("extracted == 0 && loaded == 0, show bytes true", async () => { const result = await render( ); @@ -56,28 +113,67 @@ describe("LastSyncCell", () => { status={StreamStatusType.Syncing} recordsLoaded={0} recordsExtracted={5000} + bytesLoaded={0} + bytesExtracted={1024 * 1024 * 200} // 200MB syncStartedAt={BASE_TIME - 130_000} isLoadingHistoricalData={false} + showBytes={false} /> ); expect(result.container.textContent).toBe("5,000 extracted | 2m elapsed"); }); + it("extracted > 0 && loaded == 0 and show bytes", async () => { + const result = await render( + + ); - it("extracted > 0 && loaded > 0", async () => { + expect(result.container.textContent).toBe("200 MB extracted | 2m elapsed"); + }); + + it("records extracted > 0 && loaded > 0", async () => { const result = await render( ); expect(result.container.textContent).toBe("3,000 loaded | 2m elapsed"); }); + it("bytes extracted > 0 && loaded > 0", async () => { + const result = await render( + + ); + + expect(result.container.textContent).toBe("200 MB loaded | 2m elapsed"); + }); + it.each([ [30, "a few seconds"], [3 * 24 * 60 * 60 * 1000, "72h"], @@ -91,6 +187,7 @@ describe("LastSyncCell", () => { recordsExtracted={5000} syncStartedAt={BASE_TIME - elapsedTime} isLoadingHistoricalData={false} + showBytes={false} /> ); diff --git 
a/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.tsx index 7c53154d5cb..9116a1a6392 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/LatestSyncCell.tsx @@ -1,4 +1,5 @@ import dayjs from "dayjs"; +import { useMemo } from "react"; import { FormattedMessage } from "react-intl"; import { StreamStatusType } from "components/connection/StreamStatusIndicator"; @@ -7,6 +8,7 @@ import { Text } from "components/ui/Text"; import { Tooltip } from "components/ui/Tooltip"; import { activeStatuses } from "area/connection/utils"; +import { formatBytes } from "core/utils/numberHelper"; interface LatestSyncCellProps { recordsLoaded?: number; @@ -14,6 +16,9 @@ interface LatestSyncCellProps { syncStartedAt?: number; status: StreamStatusType; isLoadingHistoricalData: boolean; + showBytes: boolean; + bytesLoaded?: number; + bytesExtracted?: number; } export const LatestSyncCell: React.FC = ({ @@ -22,12 +27,49 @@ export const LatestSyncCell: React.FC = ({ syncStartedAt, status, isLoadingHistoricalData, + showBytes, + bytesLoaded, + bytesExtracted, }) => { const start = dayjs(syncStartedAt); const end = dayjs(Date.now()); const hours = Math.abs(end.diff(start, "hour")); const minutes = Math.abs(end.diff(start, "minute")) - hours * 60; + const valueToShow = useMemo(() => { + if (activeStatuses.includes(status)) { + // if we're showing bytes, show loaded bytes if they exist, otherwise show extracted bytes if they exist + + if (showBytes) { + if (bytesLoaded && bytesLoaded > 0) { + return ; + } else if (bytesExtracted && bytesExtracted > 0) { + return ; + } + } else if (!showBytes) { + // if we're showing records, show loaded records if they exist, otherwise show extracted records if they exist + if (recordsLoaded && recordsLoaded > 0) { + return ; + } else if (recordsExtracted && recordsExtracted > 
0) { + return ; + } + } + // if none of them exist but the stream is active, show "starting" + return ; + } + + // if we're showing historical data, show the proper count or a placeholder if empty + if (!activeStatuses.includes(status)) { + if (showBytes && bytesLoaded !== undefined) { + return ; + } else if (!showBytes && recordsLoaded !== undefined) { + return ; + } + } + + return undefined; + }, [showBytes, recordsLoaded, recordsExtracted, status, bytesLoaded, bytesExtracted]); + if (!activeStatuses.includes(status) && isLoadingHistoricalData) { return ( @@ -39,11 +81,8 @@ export const LatestSyncCell: React.FC = ({ {!activeStatuses.includes(status) && ( - {recordsLoaded !== undefined ? ( - } - > + {valueToShow ? ( + ) : ( @@ -54,22 +93,12 @@ export const LatestSyncCell: React.FC = ({ {activeStatuses.includes(status) && ( <> - {!!recordsLoaded && recordsLoaded > 0 ? ( - } - > - - - ) : recordsExtracted ? ( - } - > + {valueToShow ? ( + ) : ( - + <>- )} {syncStartedAt && ( diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss index 85c55ea6208..dc420498ace 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.module.scss @@ -25,6 +25,13 @@ $card-header-fixed-height: 77px; .icon { margin-left: variables.$spacing-sm; } + + p { + margin-left: variables.$spacing-sm; + background-color: colors.$grey-100; + padding: variables.$spacing-xs variables.$spacing-sm; + border-radius: variables.$border-radius-sm; + } } .search { @@ -38,16 +45,7 @@ $card-header-fixed-height: 77px; } .syncing { - background: none; - @include mixins.striped-background(colors.$dark-blue-30, 30px); - } - - .syncing--next { background: colors.$grey-40; - - &.row:hover { - background-color: colors.$grey-40; - } } .statusHeader { @@ -62,6 +60,10 @@ $card-header-fixed-height: 77px; 
width: 50px; } + .latestSyncHeader { + width: 400px; + } + .row:hover { background-color: colors.$foreground; } diff --git a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx index e165f2d9363..369ea801cfa 100644 --- a/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx +++ b/airbyte-webapp/src/pages/connections/StreamStatusPage/StreamsList.tsx @@ -13,6 +13,7 @@ import { Heading } from "components/ui/Heading"; import { Icon } from "components/ui/Icon"; import { ScrollParentContext } from "components/ui/ScrollParent"; import { Table } from "components/ui/Table"; +import { Text } from "components/ui/Text"; import { Tooltip } from "components/ui/Tooltip"; import { activeStatuses } from "area/connection/utils"; @@ -30,6 +31,8 @@ import { StreamsListSubtitle } from "./StreamsListSubtitle"; export const StreamsList: React.FC = () => { const [showRelativeTime, setShowRelativeTime] = useToggle(true); + const [showBytes, setShowBytes] = useToggle(false); + const connection = useCurrentConnection(); const streamEntries = useUiStreamStates(connection.connectionId); const trackCountRef = useRef(0); @@ -48,23 +51,60 @@ export const StreamsList: React.FC = () => { columnHelper.accessor("streamName", { header: () => , cell: (props) => {props.cell.getValue()}, - meta: { responsive: true }, }), columnHelper.accessor("recordsLoaded", { id: "latestSync", - header: () => , + header: () => ( + + + + {showBytes ? 
( + + ) : ( + + )} + + + } + > + + + ), + }} + /> + + ), cell: (props) => { return ( ); }, - meta: { responsive: true }, + meta: { + thClassName: styles.latestSyncHeader, + responsive: true, + }, }), columnHelper.accessor("dataFreshAsOf", { header: () => ( @@ -89,7 +129,6 @@ export const StreamsList: React.FC = () => { cell: (props) => ( ), - meta: { responsive: true }, }), columnHelper.accessor("dataFreshAsOf", { header: () => null, @@ -105,7 +144,7 @@ export const StreamsList: React.FC = () => { }, }), ], - [columnHelper, setShowRelativeTime, showRelativeTime] + [columnHelper, setShowBytes, setShowRelativeTime, showBytes, showRelativeTime] ); const { @@ -145,8 +184,7 @@ export const StreamsList: React.FC = () => { rowId={(row) => `${row.streamNamespace ?? ""}.${row.streamName}`} getRowClassName={(stream) => classNames(styles.row, { - [styles["syncing--next"]]: - activeStatuses.includes(stream.status) && stream.status !== StreamStatusType.Queued, + [styles.syncing]: activeStatuses.includes(stream.status) && stream.status !== StreamStatusType.Queued, }) } sorting={false} From a9d68165dd32a28d12904ba10072a9302cecf8c9 Mon Sep 17 00:00:00 2001 From: Teal Larson Date: Tue, 1 Oct 2024 10:40:15 -0400 Subject: [PATCH 28/36] fix: use correct intl translation for refresh jobs (#14193) Co-authored-by: Chandler Prall --- .../ConnectionTimelinePage/components/TimelineEventUser.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/TimelineEventUser.tsx b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/TimelineEventUser.tsx index 331e73d4f1a..a2e902c19b7 100644 --- a/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/TimelineEventUser.tsx +++ b/airbyte-webapp/src/pages/connections/ConnectionTimelinePage/components/TimelineEventUser.tsx @@ -22,9 +22,9 @@ export const UserCancelledDescription: React.FC = const messageId = jobType === "sync" ? 
`connection.timeline.sync_cancelled.description` - : "clear" + : jobType === "clear" ? `connection.timeline.clear_cancelled.description` - : "refresh" + : jobType === "refresh" ? `connection.timeline.refresh_cancelled.description` : null; From e8d31fce1e8f53ea6ca63bf5e161e7f408e0e5a9 Mon Sep 17 00:00:00 2001 From: Catherine Noll Date: Tue, 1 Oct 2024 11:29:23 -0400 Subject: [PATCH 29/36] revert: "refactor: connector rollout api endpoint updates (#14145)" (#14197) --- .../server-api/src/main/openapi/config.yaml | 30 ++++++------------- .../handlers/ConnectorRolloutHandler.kt | 8 ----- .../handlers/ConnectorRolloutHandlerTest.kt | 15 +++++----- .../rollout/client/ConnectorRolloutCLI.kt | 6 ---- .../ConnectorRolloutActivityInputFinalize.kt | 1 - .../ConnectorRolloutActivityInputRollout.kt | 1 - .../ConnectorRolloutActivityInputStart.kt | 1 - .../activities/DoRolloutActivityImpl.kt | 1 - .../activities/FinalizeRolloutActivityImpl.kt | 1 - .../activities/StartRolloutActivityImpl.kt | 1 - .../apis/ConnectorRolloutApiController.java | 3 +- .../src/main/resources/application.yml | 4 +-- 12 files changed, 20 insertions(+), 52 deletions(-) diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml b/airbyte-api/server-api/src/main/openapi/config.yaml index e15410859c1..756e55ec0b2 100644 --- a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -5056,6 +5056,12 @@ paths: # Connector Rollouts /v1/connector_rollout/list_all: post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectorRolloutListAllRequestBody" + responses: "200": content: @@ -7362,6 +7368,9 @@ components: type: string format: uuid + ConnectorRolloutListAllRequestBody: + type: object + ConnectorRolloutListByActorDefinitionIdRequestBody: type: object required: @@ -7452,9 +7461,6 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" - updated_by: - type: string 
- format: uuid ConnectorRolloutStartResponse: type: object @@ -7480,9 +7486,6 @@ components: $ref: "#/components/schemas/ActorId" rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" - updated_by: - type: string - format: uuid ConnectorRolloutResponse: type: object @@ -7510,9 +7513,6 @@ components: type: string rollout_strategy: $ref: "#/components/schemas/ConnectorRolloutStrategy" - updated_by: - type: string - format: uuid ConnectorRolloutFinalizeResponse: type: object @@ -7528,7 +7528,6 @@ components: - docker_repository - docker_image_tag - actor_definition_id - - updated_by properties: docker_repository: type: string @@ -7537,9 +7536,6 @@ components: actor_definition_id: type: string format: uuid - updated_by: - type: string - format: uuid ConnectorRolloutManualRolloutRequestBody: type: object @@ -7548,7 +7544,6 @@ components: - docker_image_tag - actor_definition_id - id - - updated_by - actor_ids properties: docker_repository: @@ -7561,9 +7556,6 @@ components: id: type: string format: uuid - updated_by: - type: string - format: uuid actor_ids: type: array items: @@ -7576,7 +7568,6 @@ components: - docker_image_tag - actor_definition_id - id - - updated_by - state properties: docker_repository: @@ -7589,9 +7580,6 @@ components: id: type: string format: uuid - updated_by: - type: string - format: uuid state: $ref: "#/components/schemas/ConnectorRolloutStateTerminal" error_msg: diff --git a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt index 98a2d76c2b3..b2948843970 100644 --- a/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt +++ b/airbyte-commons-server/src/main/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandler.kt @@ -70,11 +70,6 @@ open class ConnectorRolloutHandler 
.hasBreakingChanges(connectorRollout.hasBreakingChanges) .rolloutStrategy(rolloutStrategy) .maxStepWaitTimeMins(connectorRollout.maxStepWaitTimeMins?.toInt()) - .updatedAt(connectorRollout.updatedAt?.let { unixTimestampToOffsetDateTime(it) }) - .createdAt(connectorRollout.createdAt?.let { unixTimestampToOffsetDateTime(it) }) - .expiresAt(connectorRollout.expiresAt?.let { unixTimestampToOffsetDateTime(it) }) - .errorMsg(connectorRollout.errorMsg) - .failedReason(connectorRollout.failedReason) .updatedBy( connectorRollout.rolloutStrategy?.let { strategy -> connectorRollout.updatedBy?.let { updatedBy -> @@ -155,7 +150,6 @@ open class ConnectorRolloutHandler dockerRepository: String, actorDefinitionId: UUID, dockerImageTag: String, - updatedBy: UUID, ): ConnectorRollout { val actorDefinitionVersion = actorDefinitionService.getActorDefinitionVersion( @@ -191,7 +185,6 @@ open class ConnectorRolloutHandler .withActorDefinitionId(actorDefinitionId) .withReleaseCandidateVersionId(actorDefinitionVersion.get().versionId) .withInitialVersionId(initialVersion.get().versionId) - .withUpdatedBy(updatedBy) .withState(ConnectorEnumRolloutState.INITIALIZED) .withHasBreakingChanges(false) connectorRolloutService.writeConnectorRollout(connectorRollout) @@ -365,7 +358,6 @@ open class ConnectorRolloutHandler connectorRolloutWorkflowStart.dockerRepository, connectorRolloutWorkflowStart.actorDefinitionId, connectorRolloutWorkflowStart.dockerImageTag, - connectorRolloutWorkflowStart.updatedBy, ) connectorRolloutClient.startWorkflow( ConnectorRolloutActivityInputStart( diff --git a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt index 338e62ce606..b2598992674 100644 --- a/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt +++ 
b/airbyte-commons-server/src/test/kotlin/io/airbyte/commons/server/handlers/ConnectorRolloutHandlerTest.kt @@ -85,7 +85,6 @@ internal class ConnectorRolloutHandlerTest { val DOCKER_IMAGE_TAG = "0.1" val ACTOR_DEFINITION_ID = UUID.randomUUID() val RELEASE_CANDIDATE_VERSION_ID = UUID.randomUUID() - val UPDATED_BY = UUID.randomUUID() @JvmStatic fun validInsertStates() = listOf(ConnectorEnumRolloutState.CANCELED_ROLLED_BACK) @@ -608,7 +607,6 @@ internal class ConnectorRolloutHandlerTest { dockerRepository = DOCKER_REPOSITORY dockerImageTag = DOCKER_IMAGE_TAG actorDefinitionId = ACTOR_DEFINITION_ID - updatedBy = UPDATED_BY } val connectorRollout = createMockConnectorRollout(rolloutId) @@ -695,6 +693,7 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput updates rollout when already exists in INITIALIZED state`() { val rolloutId = UUID.randomUUID() + val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() @@ -710,7 +709,7 @@ internal class ConnectorRolloutHandlerTest { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(any()) } returns Optional.of(createMockActorDefinitionVersion()) - val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, actorDefinitionId, dockerImageTag, UPDATED_BY) + val result = connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, actorDefinitionId, dockerImageTag) assertEquals(connectorRollout.id, result.id) verifyAll { @@ -723,6 +722,7 @@ internal class ConnectorRolloutHandlerTest { @Test fun `test getOrCreateAndValidateManualStartInput throws when initial version is not found`() { val rolloutId = UUID.randomUUID() + val dockerRepository = "airbyte/source-faker" val dockerImageTag = "0.1" val actorDefinitionId = UUID.randomUUID() val actorDefinitionVersion = createMockActorDefinitionVersion() 
@@ -740,10 +740,9 @@ internal class ConnectorRolloutHandlerTest { assertThrows { connectorRolloutHandler.getOrCreateAndValidateManualStartInput( - DOCKER_REPOSITORY, + dockerRepository, actorDefinitionId, dockerImageTag, - UPDATED_BY, ) } } @@ -763,7 +762,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getDefaultVersionForActorDefinitionIdOptional(ACTOR_DEFINITION_ID) } returns Optional.of(actorDefinitionVersion) every { connectorRolloutService.writeConnectorRollout(any()) } returns connectorRollout - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) verifyAll { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) @@ -778,7 +777,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.empty() assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(DOCKER_REPOSITORY, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } verify { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } @@ -798,7 +797,7 @@ internal class ConnectorRolloutHandlerTest { every { actorDefinitionService.getActorDefinitionVersion(ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } returns Optional.of(actorDefinitionVersion) assertThrows { - connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG, UPDATED_BY) + connectorRolloutHandler.getOrCreateAndValidateManualStartInput(dockerRepository, ACTOR_DEFINITION_ID, DOCKER_IMAGE_TAG) } verifyAll { diff --git 
a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt index 8806170e8f2..7ab12f65342 100644 --- a/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt +++ b/airbyte-connector-rollout-client/src/main/kotlin/io/airbyte/connector/rollout/client/ConnectorRolloutCLI.kt @@ -111,7 +111,6 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, - UUID(0, 0), ) startWorkflow(rolloutClient, startInput) } @@ -133,7 +132,6 @@ class ConnectorRolloutCLI : Runnable { dockerRepository, dockerImageTag, actorDefinitionId, - UUID(0, 0), rolloutId!!, actorIds!!, ) @@ -146,7 +144,6 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, - UUID(0, 0), ConnectorRolloutStateTerminal.valueOf(ConnectorRolloutFinalState.SUCCEEDED.toString()), null, null, @@ -160,7 +157,6 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, - UUID(0, 0), ConnectorRolloutStateTerminal.FAILED_ROLLED_BACK, null, null, @@ -174,7 +170,6 @@ class ConnectorRolloutCLI : Runnable { dockerImageTag, actorDefinitionId, rolloutId!!, - UUID(0, 0), ConnectorRolloutStateTerminal.CANCELED_ROLLED_BACK, null, null, @@ -192,7 +187,6 @@ class ConnectorRolloutCLI : Runnable { client: ConnectorRolloutApi, input: ConnectorRolloutManualStartRequestBody, ) { - logFormatted("CLI.startWorkflow using client", client) logFormatted("CLI.startWorkflow with input", input) logFormatted("CLI Rollout workflows status", client.manualStartConnectorRollout(input)) } diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt 
b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt index 2e59ea2c3b7..9cea8ec7dcf 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputFinalize.kt @@ -11,5 +11,4 @@ data class ConnectorRolloutActivityInputFinalize( var result: ConnectorRolloutFinalState, var errorMsg: String? = null, var failedReason: String? = null, - var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt index 2b928988d05..4d18042bb85 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputRollout.kt @@ -8,5 +8,4 @@ data class ConnectorRolloutActivityInputRollout( var actorDefinitionId: UUID, var rolloutId: UUID, var actorIds: List, - var updatedBy: UUID? 
= null, ) diff --git a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt index 6caa7e66bab..e2fafe9e3cc 100644 --- a/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt +++ b/airbyte-connector-rollout-shared/src/main/kotlin/io/airbyte/connector/rollout/shared/models/ConnectorRolloutActivityInputStart.kt @@ -7,5 +7,4 @@ data class ConnectorRolloutActivityInputStart( var dockerImageTag: String, var actorDefinitionId: UUID, var rolloutId: UUID, - var updatedBy: UUID? = null, ) diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt index 6c907eca037..71829ab1416 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/DoRolloutActivityImpl.kt @@ -37,7 +37,6 @@ class DoRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : Do input.rolloutId, input.actorIds, ConnectorRolloutStrategy.MANUAL, - input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt index 973ebfc87b7..ebf5798b749 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt +++ 
b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/FinalizeRolloutActivityImpl.kt @@ -47,7 +47,6 @@ class FinalizeRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient ConnectorRolloutStrategy.MANUAL, errorMsg, failureReason, - input.updatedBy, ) return try { diff --git a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt index 4297b37b261..5088da35dcf 100644 --- a/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt +++ b/airbyte-connector-rollout-worker/src/main/kotlin/io/airbyte/connector/rollout/worker/activities/StartRolloutActivityImpl.kt @@ -38,7 +38,6 @@ class StartRolloutActivityImpl(private val airbyteApiClient: AirbyteApiClient) : input.rolloutId, workflowRunId, ConnectorRolloutStrategy.MANUAL, - input.updatedBy, ) return try { diff --git a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java index e7b6c24e32f..7dced66f8ad 100644 --- a/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java +++ b/airbyte-server/src/main/java/io/airbyte/server/apis/ConnectorRolloutApiController.java @@ -34,6 +34,7 @@ import io.micronaut.scheduling.annotation.ExecuteOn; import io.micronaut.security.annotation.Secured; import io.micronaut.security.rules.SecurityRule; +import jakarta.validation.Valid; import java.util.UUID; @Controller("/api/v1/connector_rollout") @@ -132,7 +133,7 @@ public ConnectorRolloutListResponse getConnectorRolloutsList(@Body final Connect @Secured({ADMIN}) @ExecuteOn(AirbyteTaskExecutors.IO) @Override - public ConnectorRolloutListResponse getConnectorRolloutsListAll() { + 
public ConnectorRolloutListResponse getConnectorRolloutsListAll(@Valid Object body) { return ApiHelper.execute(() -> { final var connectorRollouts = connectorRolloutHandler.listConnectorRollouts(); return new ConnectorRolloutListResponse().connectorRollouts(connectorRollouts); diff --git a/airbyte-server/src/main/resources/application.yml b/airbyte-server/src/main/resources/application.yml index d1fee022f5d..ded8be3a392 100644 --- a/airbyte-server/src/main/resources/application.yml +++ b/airbyte-server/src/main/resources/application.yml @@ -349,8 +349,8 @@ temporal: enabled: ${TEMPORAL_CLOUD_ENABLED:false} host: ${TEMPORAL_CLOUD_HOST:} namespace: ${TEMPORAL_CLOUD_NAMESPACE:} - connector-rollout: - namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:} + connectorRollout: + namespace: ${TEMPORAL_CLOUD_NAMESPACE_CONNECTOR_ROLLOUT:connector-rollout-stage.ebc2e} host: ${TEMPORAL_HOST:`airbyte-temporal:7233`} retention: ${TEMPORAL_HISTORY_RETENTION_IN_DAYS:30} sdk: From 312dd9553a22c0153841fc8eb9633af28c5ae614 Mon Sep 17 00:00:00 2001 From: Lake Mossman Date: Tue, 1 Oct 2024 08:45:34 -0700 Subject: [PATCH 30/36] fix: switching back to draft version in builder UI (#14191) --- .../connectorBuilder/ConnectorBuilderStateService.tsx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx index 63e973b4c0b..395620781d0 100644 --- a/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx +++ b/airbyte-webapp/src/services/connectorBuilder/ConnectorBuilderStateService.tsx @@ -423,7 +423,7 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< } if (displayedVersion === undefined && version !== undefined) { - setPreviousManifestDraft(jsonManifest); + setPreviousManifestDraft(resolvedManifest); } else if (version === undefined) { setPreviousManifestDraft(undefined); } @@ 
-445,7 +445,7 @@ export const InternalConnectorBuilderFormStateProvider: React.FC< setDisplayedVersion(version); setStateKey((key) => key + 1); }, - [currentProject.name, displayedVersion, getValues, jsonManifest, setStateKey, setValue] + [currentProject.name, displayedVersion, getValues, resolvedManifest, setStateKey, setValue] ); const { mutateAsync: sendPublishRequest } = usePublishBuilderProject(); From 9c88cdb6d10c84c0e63a93a81f4fbf16279b27ec Mon Sep 17 00:00:00 2001 From: Tim Roes Date: Tue, 1 Oct 2024 18:55:37 +0200 Subject: [PATCH 31/36] feat: add new payment config APIs (#14154) --- .../io/airbyte/api/problems/ResourceType.kt | 8 ++ .../src/main/openapi/api-problems.yaml | 2 +- .../server-api/src/main/openapi/config.yaml | 100 ++++++++++++++++++ .../src/main/resources/intents.yaml | 7 ++ .../OrganizationPaymentConfigService.kt | 2 + ...rganizationPaymentConfigServiceDataImpl.kt | 4 + airbyte-server/build.gradle.kts | 1 + .../OrganizationPaymentConfigController.kt | 94 ++++++++++++++++ ...OrganizationPaymentConfigControllerTest.kt | 53 ++++++++++ 9 files changed, 270 insertions(+), 1 deletion(-) create mode 100644 airbyte-api/problems-api/src/main/kotlin/io/airbyte/api/problems/ResourceType.kt create mode 100644 airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt create mode 100644 airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt diff --git a/airbyte-api/problems-api/src/main/kotlin/io/airbyte/api/problems/ResourceType.kt b/airbyte-api/problems-api/src/main/kotlin/io/airbyte/api/problems/ResourceType.kt new file mode 100644 index 00000000000..36362c50455 --- /dev/null +++ b/airbyte-api/problems-api/src/main/kotlin/io/airbyte/api/problems/ResourceType.kt @@ -0,0 +1,8 @@ +package io.airbyte.api.problems + +class ResourceType { + companion object { + const val ORGANIZATION = "organization" + const val ORGANIZATION_PAYMENT_CONFIG = 
"organization_payment_configuration" + } +} diff --git a/airbyte-api/problems-api/src/main/openapi/api-problems.yaml b/airbyte-api/problems-api/src/main/openapi/api-problems.yaml index 7752787347b..b1e075fbf30 100644 --- a/airbyte-api/problems-api/src/main/openapi/api-problems.yaml +++ b/airbyte-api/problems-api/src/main/openapi/api-problems.yaml @@ -468,7 +468,7 @@ components: properties: status: type: integer - default: 400 + default: 404 type: type: string default: https://reference.airbyte.com/reference/errors#resource-not-found diff --git a/airbyte-api/server-api/src/main/openapi/config.yaml b/airbyte-api/server-api/src/main/openapi/config.yaml index 756e55ec0b2..d18faa62e80 100644 --- a/airbyte-api/server-api/src/main/openapi/config.yaml +++ b/airbyte-api/server-api/src/main/openapi/config.yaml @@ -81,6 +81,8 @@ tags: description: Interactions with permission related resources. - name: organization description: Interactions with organizations. + - name: organization_payment_config + description: Interactions with organization payment configs - name: deployment_metadata description: Metadata about the Airbyte deployment - name: applications @@ -4680,6 +4682,73 @@ paths: application/json: schema: $ref: "#/components/schemas/OrganizationTrialStatusRead" + /v1/organization_payment_config/{organizationId}: + get: + summary: Get an organization payment config + tags: + - organization_payment_config + - billing + - cloud-only + - admin-api + operationId: getOrganizationPaymentConfig + parameters: + - in: path + name: organizationId + schema: + type: string + format: uuid + required: true + responses: + "200": + description: Successfully retrieved the organization payment config + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationPaymentConfigRead" + delete: + summary: Delete an organization payment config + tags: + - organization_payment_config + - billing + - cloud-only + - admin-api + operationId: deleteOrganizationPaymentConfig + 
parameters: + - in: path + name: organizationId + schema: + type: string + format: uuid + required: true + responses: + "204": + description: Successfully deleted organization payment config + "404": + description: Payment config didn't exist + /v1/organization_payment_config: + post: + summary: Create or update an organization payment config + tags: + - organization_payment_config + - billing + - cloud-only + - admin-api + operationId: updateOrganizationPaymentConfig + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationPaymentConfigUpdateRequestBody" + responses: + "200": + description: Updated the organization payment config + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationPaymentConfigRead" + "404": + description: Couldn't find the organization id + /v1/organizations/get: post: summary: Get an organization info @@ -10843,6 +10912,37 @@ components: trialEndsAt: $ref: "#/components/schemas/ISO8601DateTime" + # Organization payment config + OrganizationPaymentConfigRead: + type: object + required: + - organizationId + - paymentStatus + properties: + organizationId: + type: string + format: uuid + paymentStatus: + type: string + enum: + - uninitialized + - okay + - grace_period + - disabled + - locked + - manual + paymentProviderId: + type: string + gracePeriodEndAt: + $ref: "#/components/schemas/ISO8601DateTime" + usageCategoryOverwrite: + type: string + enum: + - free + - internal + OrganizationPaymentConfigUpdateRequestBody: + $ref: "#/components/schemas/OrganizationPaymentConfigRead" + # ORGANIZATIONS OrganizationId: type: string diff --git a/airbyte-commons-auth/src/main/resources/intents.yaml b/airbyte-commons-auth/src/main/resources/intents.yaml index 2bb9cf9e73a..8f364b1dfee 100644 --- a/airbyte-commons-auth/src/main/resources/intents.yaml +++ b/airbyte-commons-auth/src/main/resources/intents.yaml @@ -20,3 +20,10 @@ intents: roles: - ORGANIZATION_ADMIN - ADMIN + 
ManageOrganizationPaymentConfigs: + name: Manage organization payment configs + description: Ability to directly interact with organization payment configs + # This intent can only be used for instance admin at the moment, since the API it's used on are not all using + # POST request bodies, thus our authentication logic might not find the organizationId in the request. + roles: + - ADMIN diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationPaymentConfigService.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationPaymentConfigService.kt index 7c367d1da06..a27223184b8 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationPaymentConfigService.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/OrganizationPaymentConfigService.kt @@ -7,4 +7,6 @@ interface OrganizationPaymentConfigService { fun findByOrganizationId(organizationId: UUID): OrganizationPaymentConfig? fun savePaymentConfig(organizationPaymentConfig: OrganizationPaymentConfig): Unit + + fun deletePaymentConfig(organizationId: UUID): Unit } diff --git a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationPaymentConfigServiceDataImpl.kt b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationPaymentConfigServiceDataImpl.kt index 9c8440e72e7..03b5e1fdfc4 100644 --- a/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationPaymentConfigServiceDataImpl.kt +++ b/airbyte-data/src/main/kotlin/io/airbyte/data/services/impls/data/OrganizationPaymentConfigServiceDataImpl.kt @@ -22,4 +22,8 @@ class OrganizationPaymentConfigServiceDataImpl( organizationPaymentConfigRepository.save(organizationPaymentConfig.toEntity()) } } + + override fun deletePaymentConfig(organizationId: UUID) { + organizationPaymentConfigRepository.deleteById(organizationId) + } } diff --git a/airbyte-server/build.gradle.kts b/airbyte-server/build.gradle.kts index 0a2f75dfab6..b481e0124b8 100644 --- 
a/airbyte-server/build.gradle.kts +++ b/airbyte-server/build.gradle.kts @@ -96,6 +96,7 @@ dependencies { testImplementation(libs.mockito.inline) testImplementation(libs.reactor.test) testImplementation(libs.bundles.junit) + testImplementation(libs.bundles.kotest) testImplementation(libs.assertj.core) testImplementation(libs.junit.pioneer) testImplementation(libs.mockk) diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt new file mode 100644 index 00000000000..0aebfd4eff6 --- /dev/null +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigController.kt @@ -0,0 +1,94 @@ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.generated.OrganizationPaymentConfigApi +import io.airbyte.api.model.generated.OrganizationPaymentConfigRead +import io.airbyte.api.problems.ResourceType +import io.airbyte.api.problems.model.generated.ProblemResourceData +import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem +import io.airbyte.commons.auth.generated.Intent +import io.airbyte.commons.auth.permissions.RequiresIntent +import io.airbyte.commons.server.scheduling.AirbyteTaskExecutors +import io.airbyte.config.OrganizationPaymentConfig +import io.airbyte.config.OrganizationPaymentConfig.PaymentStatus +import io.airbyte.config.OrganizationPaymentConfig.UsageCategoryOverride +import io.airbyte.data.services.OrganizationPaymentConfigService +import io.airbyte.data.services.OrganizationService +import io.micronaut.http.HttpStatus +import io.micronaut.http.annotation.Body +import io.micronaut.http.annotation.Controller +import io.micronaut.http.annotation.Delete +import io.micronaut.http.annotation.Get +import io.micronaut.http.annotation.PathVariable +import io.micronaut.http.annotation.Post +import io.micronaut.http.annotation.Status +import 
io.micronaut.scheduling.annotation.ExecuteOn +import java.time.Instant +import java.time.OffsetDateTime +import java.time.ZoneId +import java.util.UUID + +private val UTC = ZoneId.of("UTC") + +@Controller("/api/v1/organization_payment_config") +open class OrganizationPaymentConfigController( + private val organizationPaymentConfigService: OrganizationPaymentConfigService, + private val organizationService: OrganizationService, +) : OrganizationPaymentConfigApi { + @RequiresIntent(Intent.ManageOrganizationPaymentConfigs) + @Get("/{organizationId}") + @ExecuteOn(AirbyteTaskExecutors.IO) + override fun getOrganizationPaymentConfig( + @PathVariable("organizationId") organizationId: UUID, + ): OrganizationPaymentConfigRead { + return organizationPaymentConfigService.findByOrganizationId(organizationId)?.toApiModel() + ?: throw ResourceNotFoundProblem( + ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), + ) + } + + @RequiresIntent(Intent.ManageOrganizationPaymentConfigs) + @Delete("/{organizationId}") + @ExecuteOn(AirbyteTaskExecutors.IO) + @Status(HttpStatus.NO_CONTENT) + override fun deleteOrganizationPaymentConfig( + @PathVariable("organizationId") organizationId: UUID, + ) { + if (organizationPaymentConfigService.findByOrganizationId(organizationId) == null) { + throw ResourceNotFoundProblem( + ProblemResourceData().resourceId(organizationId.toString()).resourceType(ResourceType.ORGANIZATION_PAYMENT_CONFIG), + ) + } + organizationPaymentConfigService.deletePaymentConfig(organizationId) + } + + @RequiresIntent(Intent.ManageOrganizationPaymentConfigs) + @Post + @ExecuteOn(AirbyteTaskExecutors.IO) + override fun updateOrganizationPaymentConfig( + @Body organizationPaymentConfigUpdateRequestBody: OrganizationPaymentConfigRead, + ): OrganizationPaymentConfigRead { + val orgId = organizationPaymentConfigUpdateRequestBody.organizationId + if (organizationService.getOrganization(orgId).isEmpty) { + throw 
ResourceNotFoundProblem(ProblemResourceData().resourceId(orgId.toString()).resourceType(ResourceType.ORGANIZATION)) + } + organizationPaymentConfigService.savePaymentConfig(organizationPaymentConfigUpdateRequestBody.toConfigModel()) + return getOrganizationPaymentConfig(orgId) + } +} + +private fun OrganizationPaymentConfig.toApiModel(): OrganizationPaymentConfigRead = + OrganizationPaymentConfigRead() + .organizationId(this.organizationId) + .paymentStatus(OrganizationPaymentConfigRead.PaymentStatusEnum.fromValue(this.paymentStatus.value())) + .paymentProviderId(this.paymentProviderId) + .gracePeriodEndAt(this.gracePeriodEndAt?.let { OffsetDateTime.ofInstant(Instant.ofEpochSecond(it), UTC) }) + .usageCategoryOverwrite(this.usageCategoryOverride?.let { OrganizationPaymentConfigRead.UsageCategoryOverwriteEnum.fromValue(it.value()) }) + +private fun OrganizationPaymentConfigRead.toConfigModel(): OrganizationPaymentConfig = + OrganizationPaymentConfig().also { + it.organizationId = this.organizationId + it.paymentStatus = PaymentStatus.fromValue(this.paymentStatus.value()) + it.paymentProviderId = this.paymentProviderId + it.gracePeriodEndAt = this.gracePeriodEndAt?.toEpochSecond() + it.usageCategoryOverride = this.usageCategoryOverwrite?.let { UsageCategoryOverride.fromValue(it.value()) } + } diff --git a/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt new file mode 100644 index 00000000000..80d314ddca8 --- /dev/null +++ b/airbyte-server/src/test/kotlin/io/airbyte/server/apis/controllers/OrganizationPaymentConfigControllerTest.kt @@ -0,0 +1,53 @@ +package io.airbyte.server.apis.controllers + +import io.airbyte.api.model.generated.OrganizationPaymentConfigRead +import io.airbyte.api.problems.throwable.generated.ResourceNotFoundProblem +import io.airbyte.data.services.OrganizationPaymentConfigService 
+import io.airbyte.data.services.OrganizationService +import io.kotest.assertions.throwables.shouldThrow +import io.mockk.every +import io.mockk.mockk +import org.junit.jupiter.api.BeforeEach +import org.junit.jupiter.api.Test +import java.util.Optional +import java.util.UUID + +class OrganizationPaymentConfigControllerTest { + private var organizationService = mockk() + private var organizationPaymentConfigService = mockk() + private lateinit var controller: OrganizationPaymentConfigController + + @BeforeEach + fun setup() { + controller = OrganizationPaymentConfigController(organizationPaymentConfigService, organizationService) + } + + @Test + fun `should throw for config not found on delete`() { + val orgId = UUID.randomUUID() + every { organizationPaymentConfigService.findByOrganizationId(orgId) } returns null + shouldThrow { + controller.deleteOrganizationPaymentConfig(orgId) + } + } + + @Test + fun `should throw for config not found on get`() { + val orgId = UUID.randomUUID() + every { organizationPaymentConfigService.findByOrganizationId(orgId) } returns null + shouldThrow { + controller.getOrganizationPaymentConfig(orgId) + } + } + + @Test + fun `invalid organization id should fail saving payment config`() { + val orgId = UUID.randomUUID() + every { organizationService.getOrganization(orgId) } returns Optional.empty() + shouldThrow { + controller.updateOrganizationPaymentConfig( + OrganizationPaymentConfigRead().organizationId(orgId).paymentStatus(OrganizationPaymentConfigRead.PaymentStatusEnum.MANUAL), + ) + } + } +} From 2f04a04f2e6db4561e4806e344c2499a6d824eb0 Mon Sep 17 00:00:00 2001 From: Tim Roes Date: Tue, 1 Oct 2024 18:56:33 +0200 Subject: [PATCH 32/36] feat: new top level billing banners (#14156) Co-authored-by: Chandler Prall --- .../src/main/resources/intents.yaml | 8 + .../apis/controllers/BillingController.kt | 2 +- .../ui/Banner/AlertBanner.module.scss | 2 +- .../src/components/ui/Banner/AlertBanner.tsx | 13 +- 
.../src/core/api/hooks/organizations.ts | 18 +- airbyte-webapp/src/locales/en.json | 16 +- .../WorkspaceStatusBanner.test.tsx | 0 .../WorkspaceStatusBanner.tsx | 2 +- .../StatusBanner/StatusBanner.test.tsx | 224 ++++++++++++++++++ .../components/StatusBanner/StatusBanner.tsx | 28 +++ .../billing/utils/useBillingStatusBanner.tsx | 121 ++++++++-- .../AccountBalance/AccountBalance.tsx | 2 +- .../BillingBanners.tsx | 2 +- .../OrganizationBillingPage.tsx | 1 + .../layout/CloudMainView/CloudMainView.tsx | 10 +- 15 files changed, 409 insertions(+), 40 deletions(-) rename airbyte-webapp/src/packages/cloud/{views/layout/CloudMainView => area/billing/components/StatusBanner/LegacyStatusBanner}/WorkspaceStatusBanner.test.tsx (100%) rename airbyte-webapp/src/packages/cloud/{views/layout/CloudMainView => area/billing/components/StatusBanner/LegacyStatusBanner}/WorkspaceStatusBanner.tsx (97%) create mode 100644 airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx create mode 100644 airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx diff --git a/airbyte-commons-auth/src/main/resources/intents.yaml b/airbyte-commons-auth/src/main/resources/intents.yaml index 8f364b1dfee..1bf22452630 100644 --- a/airbyte-commons-auth/src/main/resources/intents.yaml +++ b/airbyte-commons-auth/src/main/resources/intents.yaml @@ -14,6 +14,14 @@ intents: roles: - ORGANIZATION_ADMIN - ADMIN + ViewOrganizationTrialStatus: + name: View organization trial status + description: Ability to see the trial status of an organization + roles: + - ORGANIZATION_READER + - ORGANIZATION_EDITOR + - ORGANIZATION_ADMIN + - ADMIN ManageOrganizationBilling: name: Manage organization billing description: View and edit billing information for the organization diff --git a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt 
index 8dac45f322d..c39ae0e2f8e 100644 --- a/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt +++ b/airbyte-server/src/main/kotlin/io/airbyte/server/apis/controllers/BillingController.kt @@ -54,7 +54,7 @@ open class BillingController : BillingApi { @Body event: JsonNode, ): Unit = throw ApiNotImplementedInOssProblem() - @RequiresIntent(Intent.ManageOrganizationBilling) + @RequiresIntent(Intent.ViewOrganizationTrialStatus) @Post("/trial_status") @ExecuteOn(AirbyteTaskExecutors.IO) override fun getOrganizationTrialStatus( diff --git a/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss b/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss index 52444b4b503..e990201c7a8 100644 --- a/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss +++ b/airbyte-webapp/src/components/ui/Banner/AlertBanner.module.scss @@ -15,7 +15,7 @@ } } -.default { +.info { background-color: colors.$blue; color: colors.$white; } diff --git a/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx b/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx index 32e91d33531..83b620e1fe2 100644 --- a/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx +++ b/airbyte-webapp/src/components/ui/Banner/AlertBanner.tsx @@ -4,16 +4,21 @@ import React from "react"; import styles from "./AlertBanner.module.scss"; interface AlertBannerProps { - color?: "default" | "warning" | "error"; + color?: "info" | "warning" | "error"; message: React.ReactNode; + "data-testid"?: string; } -export const AlertBanner: React.FC = ({ color = "default", message }) => { +export const AlertBanner: React.FC = ({ color = "info", message, ...rest }) => { const bannerStyle = classnames(styles.alertBannerContainer, { - [styles.default]: color === "default", + [styles.info]: color === "info", [styles.yellow]: color === "warning", [styles.red]: color === "error", }); - return
{message}
; + return ( +
+ {message} +
+ ); }; diff --git a/airbyte-webapp/src/core/api/hooks/organizations.ts b/airbyte-webapp/src/core/api/hooks/organizations.ts index f5d46e243f5..1864b60baff 100644 --- a/airbyte-webapp/src/core/api/hooks/organizations.ts +++ b/airbyte-webapp/src/core/api/hooks/organizations.ts @@ -8,10 +8,11 @@ import { getOrganizationInfo, listUsersInOrganization, updateOrganization, + getOrganizationTrialStatus, } from "../generated/AirbyteClient"; import { OrganizationUpdateRequestBody } from "../generated/AirbyteClient.schemas"; import { SCOPE_ORGANIZATION, SCOPE_USER } from "../scopes"; -import { OrganizationUserReadList } from "../types/AirbyteClient"; +import { OrganizationTrialStatusRead, OrganizationUserReadList } from "../types/AirbyteClient"; import { useRequestOptions } from "../useRequestOptions"; import { useSuspenseQuery } from "../useSuspenseQuery"; @@ -23,6 +24,7 @@ export const organizationKeys = { detail: (organizationId = "") => [...organizationKeys.all, "details", organizationId] as const, allListUsers: [SCOPE_ORGANIZATION, "users", "list"] as const, listUsers: (organizationId: string) => [SCOPE_ORGANIZATION, "users", "list", organizationId] as const, + trialStatus: (organizationId: string) => [SCOPE_ORGANIZATION, "trial", organizationId] as const, }; /** @@ -90,3 +92,17 @@ export const useListUsersInOrganization = (organizationId?: string): Organizatio } ); }; + +export const useOrganizationTrialStatus = ( + organizationId: string, + enabled: boolean +): OrganizationTrialStatusRead | undefined => { + const requestOptions = useRequestOptions(); + return useSuspenseQuery( + organizationKeys.trialStatus(organizationId), + () => { + return getOrganizationTrialStatus({ organizationId }, requestOptions); + }, + { enabled } + ); +}; diff --git a/airbyte-webapp/src/locales/en.json b/airbyte-webapp/src/locales/en.json index 4c9e347667b..d5d5d8c4610 100644 --- a/airbyte-webapp/src/locales/en.json +++ b/airbyte-webapp/src/locales/en.json @@ -1042,10 +1042,6 @@ 
"settings.billing.credits.minimumVisibleAmount": "< {zeroPointZeroOne}", "settings.organization.billing.notSetUp": "Billing has not yet been set up for this organization. Start by adding a payment method.", "settings.organization.billing.title": "Billing", - "settings.organization.billing.manualPaymentStatus": "Reach out to Sales if you have any questions about your plan.", - "settings.organization.billing.lockedPaymentStatus": "Your syncs are disabled. Please get in touch with {mail} to unlock your account.", - "settings.organization.billing.disabledPaymentStatus": "Your syncs are disabled due to unpaid invoices. Please update your payment method to enable syncing again.", - "settings.organization.billing.gracePeriodPaymentStatus": "Please update your payment method to keep using Airbyte. Otherwise your syncs will be disabled {days, plural, =0 {very soon} one {in # day} other {in # days}}.", "settings.organization.billing.billingInformation": "Billing information", "settings.organization.billing.billingInformationError": "Error loading billing information", "settings.organization.billing.update": "Update", @@ -2075,6 +2071,18 @@ "credits.zeroBalance": "All your connections have been disabled because your credit balance is 0. Buy credits or enroll in auto-recharge to enable your data to sync.", "billing.pbaBillingActive": "This workspace is part of the {organizationName} organization and is billed on the organization level. For further billing questions, please contact us.", + "billing.banners.manualPaymentStatus": "Reach out to Sales if you have any questions about your plan.", + "billing.banners.lockedPaymentStatus": "Your syncs are disabled. Please get in touch with {mail} to unlock your account.", + "billing.banners.disabledPaymentStatus": "Your syncs are disabled due to unpaid invoices. Please update your payment method to enable syncing again.", + "billing.banners.disabledPaymentStatusWithLink": "Your syncs are disabled due to unpaid invoices. 
Please update your payment method to enable syncing again.", + "billing.banners.gracePeriodPaymentStatus": "Please update your payment method to keep using Airbyte. Otherwise your syncs will be disabled {days, plural, =0 {very soon} one {in # day} other {in # days}}.", + "billing.banners.gracePeriodPaymentStatusWithLink": "Please update your payment method to keep using Airbyte. Otherwise your syncs will be disabled {days, plural, =0 {very soon} one {in # day} other {in # days}}.", + "billing.banners.preTrial": "Your 14-day trial of Airbyte will start once your first sync has succeeded.", + "billing.banners.inTrial": "Your trial ends in {days, plural, =0 {today} one {# day} other {# days}}. Enter payment details to keep using Airbyte after your trial ends.", + "billing.banners.inTrialWithLink": "Your trial ends {days, plural, =0 {today} one {in # day} other {in # days}}. Enter payment details to keep using Airbyte after your trial ends.", + "billing.banners.inTrialWithPaymentMethod": "Your trial ends {days, plural, =0 {today} one {in # day} other {in # days}}.", + "billing.banners.postTrial": "Enter payment details to keep your syncs running.", + "billing.banners.postTrialWithLink": "Enter payment details to keep your syncs running.", "trial.alertMessage": "You are using a trial of Airbyte. Your trial ends in {remainingDays, plural, one {# day} other {# days}}. 
Purchase now", diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/WorkspaceStatusBanner.test.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.test.tsx similarity index 100% rename from airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/WorkspaceStatusBanner.test.tsx rename to airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.test.tsx diff --git a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/WorkspaceStatusBanner.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.tsx similarity index 97% rename from airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/WorkspaceStatusBanner.tsx rename to airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.tsx index 9f1db93d760..b762870a52c 100644 --- a/airbyte-webapp/src/packages/cloud/views/layout/CloudMainView/WorkspaceStatusBanner.tsx +++ b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/LegacyStatusBanner/WorkspaceStatusBanner.tsx @@ -72,7 +72,7 @@ export const WorkspaceStatusBanner: React.FC = ({ cl )} diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx new file mode 100644 index 00000000000..a62129bfdec --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.test.tsx @@ -0,0 +1,224 @@ +import dayjs from "dayjs"; + +import { mocked, render } from "test-utils"; + +import { useCurrentOrganizationInfo, useOrganizationTrialStatus } from "core/api"; +import { OrganizationTrialStatusRead, WorkspaceOrganizationInfoReadBilling } from "core/api/types/AirbyteClient"; +import { Intent, useGeneratedIntent } from 
"core/utils/rbac"; + +import { StatusBanner } from "./StatusBanner"; + +jest.mock("core/api", () => ({ + useCurrentOrganizationInfo: jest.fn(), + useOrganizationTrialStatus: jest.fn(), +})); + +jest.mock("area/workspace/utils", () => ({ + useCurrentWorkspaceId: jest.fn().mockReturnValue("workspace-1"), + useCurrentWorkspaceLink: jest.fn().mockReturnValue((link: string) => link), +})); + +// We just mock out the legacy workspace banner, since that file has its own tests +jest.mock("./LegacyStatusBanner/WorkspaceStatusBanner", () => ({ + WorkspaceStatusBanner: () =>
, +})); + +jest.mock("core/api/cloud", () => ({ + useGetCloudWorkspaceAsync: jest.fn().mockReturnValue({ + workspaceId: "workspace-1", + }), +})); + +jest.mock("core/utils/rbac", () => ({ + useGeneratedIntent: jest.fn(), + Intent: jest.requireActual("core/utils/rbac").Intent, +})); + +const mockOrgInfo = (billing: WorkspaceOrganizationInfoReadBilling | undefined) => { + mocked(useCurrentOrganizationInfo).mockReturnValue({ + organizationId: "org-1", + organizationName: "org name", + sso: false, + pba: false, + billing, + }); +}; + +const mockTrialStatus = (trialStatus: OrganizationTrialStatusRead) => { + mocked(useOrganizationTrialStatus).mockReturnValue(trialStatus); +}; + +const mockGeneratedIntent = (options: { canViewTrialStatus: boolean; canManageOrganizationBilling: boolean }) => { + mocked(useGeneratedIntent).mockImplementation((intent) => { + switch (intent) { + case Intent.ViewOrganizationTrialStatus: + return options.canViewTrialStatus; + case Intent.ManageOrganizationBilling: + return options.canManageOrganizationBilling; + default: + throw new Error(`Intent ${intent} is not mocked.`); + } + }); +}; + +describe("StatusBanner", () => { + it("should render legacy banner if no billing information is available", async () => { + mockOrgInfo(undefined); + const wrapper = await render(); + expect(wrapper.getByTestId("mockLegacyWorkspaceBanner")).toBeInTheDocument(); + }); + + it("should render nothing with paymentStatus=OKAY and not in trial", async () => { + mockOrgInfo({ paymentStatus: "okay" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container).toHaveTextContent(""); + }); + + it("should not render anything for manual billing", async () => { + mockOrgInfo({ paymentStatus: "manual" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, 
canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container).toHaveTextContent(""); + }); + + it("should not render locked banner", async () => { + mockOrgInfo({ paymentStatus: "locked" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Your syncs are disabled."); + expect(wrapper.container.textContent).toContain("billing@airbyte.io"); + }); + + it("should render disabled banner w/o link", async () => { + mockOrgInfo({ paymentStatus: "disabled" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: false }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Your syncs are disabled due to unpaid invoices."); + expect(wrapper.queryByRole("link")).not.toBeInTheDocument(); + }); + + it("should render disabled banner w/ link", async () => { + mockOrgInfo({ paymentStatus: "disabled" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Your syncs are disabled due to unpaid invoices."); + expect(wrapper.queryByRole("link")).toBeInTheDocument(); + }); + + it("should render grace period banner w/o link (1 day)", async () => { + mockOrgInfo({ paymentStatus: "grace_period", gracePeriodEndsAt: dayjs().add(25, "hours").valueOf() / 1000 }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: false }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("your syncs will be disabled in 1 day"); + expect(wrapper.queryByRole("link")).not.toBeInTheDocument(); + }); + + it("should 
render grace period banner w/o link (very soon)", async () => { + mockOrgInfo({ paymentStatus: "grace_period", gracePeriodEndsAt: dayjs().add(5, "hours").valueOf() / 1000 }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: false }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("your syncs will be disabled very soon"); + expect(wrapper.queryByRole("link")).not.toBeInTheDocument(); + }); + + it("should render grace period banner w/ link (1 day)", async () => { + mockOrgInfo({ paymentStatus: "grace_period", gracePeriodEndsAt: dayjs().add(25, "hours").valueOf() / 1000 }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("your syncs will be disabled in 1 day"); + expect(wrapper.queryByRole("link")).toBeInTheDocument(); + }); + + it("should render grace period banner w/ link (very soon)", async () => { + mockOrgInfo({ paymentStatus: "grace_period", gracePeriodEndsAt: dayjs().add(5, "hours").valueOf() / 1000 }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("your syncs will be disabled very soon"); + expect(wrapper.queryByRole("link")).toBeInTheDocument(); + }); + + it("should render pre-trial banner", async () => { + mockOrgInfo({ paymentStatus: "uninitialized" }); + mockTrialStatus({ trialStatus: "pre_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("once your first sync has succeeded"); + }); + + it("should not show a trial banner if the user cannot view trial status", 
async () => { + mockOrgInfo({ paymentStatus: "uninitialized" }); + mockTrialStatus({ trialStatus: "pre_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: false }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain(""); + }); + + it("should render in-trial banner w/o link", async () => { + mockOrgInfo({ paymentStatus: "uninitialized" }); + mockTrialStatus({ + trialStatus: "in_trial", + trialEndsAt: dayjs() + .add(5 * 24 + 1, "hours") + .toISOString(), + }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: false }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Your trial ends in 5 days."); + expect(wrapper.queryByRole("link")).not.toBeInTheDocument(); + }); + + it("should render in-trial banner w/ link", async () => { + mockOrgInfo({ paymentStatus: "uninitialized" }); + mockTrialStatus({ + trialStatus: "in_trial", + trialEndsAt: dayjs() + .add(5 * 24 + 1, "hours") + .toISOString(), + }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Your trial ends in 5 days."); + expect(wrapper.queryByRole("link")).toBeInTheDocument(); + }); + + it("should render post-trial banner w/o link", async () => { + mockOrgInfo({ paymentStatus: "uninitialized" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: false }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Enter payment details"); + expect(wrapper.queryByRole("link")).not.toBeInTheDocument(); + }); + + it("should render post-trial banner w/ link", async () => { + mockOrgInfo({ paymentStatus: "uninitialized" }); + mockTrialStatus({ trialStatus: "post_trial" }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true 
}); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Enter payment details"); + expect(wrapper.queryByRole("link")).toBeInTheDocument(); + }); + + it("should render in-trial banner w/ payment method", async () => { + mockOrgInfo({ paymentStatus: "okay" }); + mockTrialStatus({ + trialStatus: "in_trial", + trialEndsAt: dayjs() + .add(5 * 24 + 1, "hours") + .toISOString(), + }); + mockGeneratedIntent({ canViewTrialStatus: true, canManageOrganizationBilling: true }); + const wrapper = await render(); + expect(wrapper.container.textContent).toContain("Your trial ends in 5 days."); + }); +}); diff --git a/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx new file mode 100644 index 00000000000..23825e31ace --- /dev/null +++ b/airbyte-webapp/src/packages/cloud/area/billing/components/StatusBanner/StatusBanner.tsx @@ -0,0 +1,28 @@ +import React from "react"; + +import { AlertBanner } from "components/ui/Banner/AlertBanner"; + +import { useCurrentWorkspaceId } from "area/workspace/utils"; +import { useCurrentOrganizationInfo } from "core/api"; +import { useGetCloudWorkspaceAsync } from "core/api/cloud"; + +import { WorkspaceStatusBanner as LegacyWorkspaceStatusBanner } from "./LegacyStatusBanner/WorkspaceStatusBanner"; +import { useBillingStatusBanner } from "../../utils/useBillingStatusBanner"; + +const LegacyStatusBanner: React.FC = () => { + const workspaceId = useCurrentWorkspaceId(); + const cloudWorkspace = useGetCloudWorkspaceAsync(workspaceId); + return cloudWorkspace ? : null; +}; + +const WorkspaceStatusBanner: React.FC = () => { + const statusBanner = useBillingStatusBanner("top_level"); + return statusBanner ? ( + + ) : null; +}; + +export const StatusBanner: React.FC = () => { + const { billing } = useCurrentOrganizationInfo(); + return {billing ? 
: }; +}; diff --git a/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx b/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx index 1a9aa7b7ed9..49dfc807b01 100644 --- a/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx +++ b/airbyte-webapp/src/packages/cloud/area/billing/utils/useBillingStatusBanner.tsx @@ -1,45 +1,58 @@ import dayjs from "dayjs"; import { useIntl } from "react-intl"; -import { ExternalLink } from "components/ui/Link"; +import { ExternalLink, Link } from "components/ui/Link"; -import { useCurrentOrganizationInfo } from "core/api"; +import { useCurrentWorkspaceLink } from "area/workspace/utils"; +import { useCurrentOrganizationInfo, useOrganizationTrialStatus } from "core/api"; import { links } from "core/utils/links"; +import { Intent, useGeneratedIntent } from "core/utils/rbac"; +import { CloudRoutes } from "packages/cloud/cloudRoutePaths"; +import { RoutePaths } from "pages/routePaths"; interface BillingStatusBanner { content: React.ReactNode; level: "warning" | "info"; } -export const useBillingStatusBanner = (): BillingStatusBanner | undefined => { +export const useBillingStatusBanner = (context: "top_level" | "billing_page"): BillingStatusBanner | undefined => { const { formatMessage } = useIntl(); - const { billing } = useCurrentOrganizationInfo(); + const createLink = useCurrentWorkspaceLink(); + const { organizationId, billing } = useCurrentOrganizationInfo(); + const canViewTrialStatus = useGeneratedIntent(Intent.ViewOrganizationTrialStatus); + const canManageOrganizationBilling = useGeneratedIntent(Intent.ManageOrganizationBilling); + const trialStatus = useOrganizationTrialStatus( + organizationId, + (billing?.paymentStatus === "uninitialized" || billing?.paymentStatus === "okay") && canViewTrialStatus + ); if (!billing) { return undefined; } if (billing.paymentStatus === "manual") { - return { - level: "info", - content: formatMessage( - { id: 
"settings.organization.billing.manualPaymentStatus" }, - { - lnk: (node: React.ReactNode) => ( - - {node} - + return context === "top_level" + ? undefined + : { + level: "info", + content: formatMessage( + { id: "billing.banners.manualPaymentStatus" }, + { + lnk: (node: React.ReactNode) => ( + + {node} + + ), + } ), - } - ), - }; + }; } if (billing.paymentStatus === "locked") { return { level: "warning", content: formatMessage( - { id: "settings.organization.billing.lockedPaymentStatus" }, + { id: "billing.banners.lockedPaymentStatus" }, { mail: ( @@ -54,7 +67,19 @@ export const useBillingStatusBanner = (): BillingStatusBanner | undefined => { if (billing.paymentStatus === "disabled") { return { level: "warning", - content: formatMessage({ id: "settings.organization.billing.disabledPaymentStatus" }), + content: formatMessage( + { + id: + context === "top_level" && canManageOrganizationBilling + ? "billing.banners.disabledPaymentStatusWithLink" + : "billing.banners.disabledPaymentStatus", + }, + { + lnk: (node: React.ReactNode) => ( + {node} + ), + } + ), }; } @@ -62,15 +87,73 @@ export const useBillingStatusBanner = (): BillingStatusBanner | undefined => { return { level: "warning", content: formatMessage( - { id: "settings.organization.billing.gracePeriodPaymentStatus" }, + { + id: + context === "top_level" && canManageOrganizationBilling + ? "billing.banners.gracePeriodPaymentStatusWithLink" + : "billing.banners.gracePeriodPaymentStatus", + }, { days: billing?.gracePeriodEndsAt ? 
Math.max(dayjs(billing.gracePeriodEndsAt * 1000).diff(dayjs(), "days"), 0) : 0, + lnk: (node: React.ReactNode) => ( + {node} + ), } ), }; } + if (billing.paymentStatus === "uninitialized" && trialStatus) { + if (trialStatus.trialStatus === "pre_trial") { + return { + level: "info", + content: formatMessage({ id: "billing.banners.preTrial" }), + }; + } + if (trialStatus.trialStatus === "in_trial") { + return { + level: "info", + content: formatMessage( + { + id: canManageOrganizationBilling ? "billing.banners.inTrialWithLink" : "billing.banners.inTrial", + }, + { + days: Math.max(dayjs(trialStatus.trialEndsAt).diff(dayjs(), "days"), 0), + lnk: (node: React.ReactNode) => ( + {node} + ), + } + ), + }; + } + if (trialStatus.trialStatus === "post_trial") { + return { + level: "info", + content: formatMessage( + { + id: canManageOrganizationBilling ? "billing.banners.postTrialWithLink" : "billing.banners.postTrial", + }, + { + lnk: (node: React.ReactNode) => ( + {node} + ), + } + ), + }; + } + } + + if (billing.paymentStatus === "okay" && trialStatus?.trialStatus === "in_trial") { + return { + level: "info", + content: formatMessage( + { id: "billing.banners.inTrialWithPaymentMethod" }, + { days: Math.max(dayjs(trialStatus.trialEndsAt).diff(dayjs(), "days"), 0) } + ), + }; + } + return undefined; }; diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx index bc888d158ff..6e1d73a91ee 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/AccountBalance/AccountBalance.tsx @@ -47,7 +47,7 @@ export const AccountBalance = () => { - {balance?.credits?.blocks?.length && } + {!!balance?.credits?.blocks?.length && } diff --git 
a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx index b8c28d0e01b..3f7fc620266 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/BillingBanners.tsx @@ -9,7 +9,7 @@ import { useBillingStatusBanner } from "packages/cloud/area/billing/utils/useBil export const BillingBanners: React.FC = () => { const { formatMessage } = useIntl(); - const billingBanner = useBillingStatusBanner(); + const billingBanner = useBillingStatusBanner("billing_page"); const isAutoRechargeEnabled = useExperiment("billing.autoRecharge"); return ( diff --git a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx index 6f970e61fb6..e36645e04de 100644 --- a/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx +++ b/airbyte-webapp/src/packages/cloud/views/billing/OrganizationBillingPage/OrganizationBillingPage.tsx @@ -70,6 +70,7 @@ export const OrganizationBillingPage: React.FC = () => { } /> )} + = (props) => { - const workspace = useCurrentWorkspace(); - const cloudWorkspace = useGetCloudWorkspaceAsync(workspace.workspaceId); - return ( - {cloudWorkspace && } + } />
From 1564b78b18ef8efc074d7d4a3166af1149abbfe7 Mon Sep 17 00:00:00 2001 From: Bryce Groff Date: Tue, 1 Oct 2024 11:13:23 -0700 Subject: [PATCH 33/36] chore: add the no lint version of the spec generation (#14105) Co-authored-by: Malik Diarra --- airbyte-api/build.gradle.kts | 16 +- .../server-api/src/main/openapi/api.yaml | 2815 + .../api_documentation_applications.yaml | 1269 + .../api_documentation_connections.yaml | 1292 + .../api_documentation_destinations.yaml | 19739 +++ .../main/openapi/api_documentation_jobs.yaml | 1312 + .../api_documentation_organizations.yaml | 1122 + .../api_documentation_permissions.yaml | 1258 + .../openapi/api_documentation_sources.yaml | 45484 +++++++ .../openapi/api_documentation_streams.yaml | 1149 + .../main/openapi/api_documentation_users.yaml | 1147 + .../openapi/api_documentation_workspaces.yaml | 2196 + .../server-api/src/main/openapi/api_sdk.yaml | 65046 ++++++++++ .../src/main/openapi/api_terraform.yaml | 100904 +++++++++++++++ 14 files changed, 244747 insertions(+), 2 deletions(-) create mode 100644 airbyte-api/server-api/src/main/openapi/api.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml create mode 100644 
airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_sdk.yaml create mode 100644 airbyte-api/server-api/src/main/openapi/api_terraform.yaml diff --git a/airbyte-api/build.gradle.kts b/airbyte-api/build.gradle.kts index b1da6a014fd..7ea62f83e4a 100644 --- a/airbyte-api/build.gradle.kts +++ b/airbyte-api/build.gradle.kts @@ -8,7 +8,19 @@ plugins { airbyte { spotless { - excludes = listOf(project(":oss:airbyte-api:workload-api").file("src/main/openapi/workload-openapi.yaml").path) + excludes = listOf( + project(":oss:airbyte-api:workload-api").file("src/main/openapi/workload-openapi.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api.yaml").path, + + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_sdk.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_terraform.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_connections.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_sources.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_destinations.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_streams.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_jobs.yaml").path, + project(":oss:airbyte-api:server-api").file("src/main/openapi/api_documentation_workspaces.yaml").path, + ) } } @@ -16,4 +28,4 @@ dependencies { project.subprojects.forEach { subProject -> implementation(project(subProject.path)) } -} \ No newline at end of file +} diff --git a/airbyte-api/server-api/src/main/openapi/api.yaml b/airbyte-api/server-api/src/main/openapi/api.yaml new file mode 100644 index 00000000000..976150c2a9b --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api.yaml @@ -0,0 +1,2815 @@ +openapi: 
3.1.0 +info: + title: airbyte-api + version: 1.0.0 + description: Programatically control Airbyte Cloud, OSS & Enterprise. +servers: + - url: https://api.airbyte.com/v1 + description: Airbyte API v1 +paths: + /: + get: + tags: + - public_root + - public + responses: + "200": + content: + text/html: {} + description: Redirects to documentation + operationId: getDocumentation + x-speakeasy-alias: getDocumentation + summary: Root path, currently returns a redirect to the documentation + security: [] + /health: + get: + tags: + - public_health + - public + responses: + "200": + description: Successful operation + operationId: getHealthCheck + summary: Health Check + security: [] + x-speakeasy-alias: getHealthCheck + x-speakeasy-group: Health + /applications: + get: + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationReadList" + examples: + Application List Response Example: + value: + applications: + - id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 1717969830000 + description: List all Applications a User has permission to view. 
+ "403": + description: Not allowed + operationId: listApplications + summary: List Applications + x-speakeasy-alias: listApplications + x-speakeasy-group: Applications + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationCreate" + examples: + Application Creation Request Example: + value: + name: test application + required: true + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Creation Response Example: + value: + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 1717969830000 + description: Creates a new Application. + "400": + description: Invalid data + "403": + description: Not allowed + operationId: createApplication + summary: Create an Application + x-speakeasy-alias: createApplication + x-speakeasy-group: Applications + /applications/{applicationId}: + get: + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Get Response Example: + value: + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 1717969830000 + description: Get an Application by the id in the path. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: getApplication + x-speakeasy-alias: getApplication + x-speakeasy-group: Applications + summary: Get an Application detail + delete: + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Delete Response Example: + value: + id: 780d5bd9-a8a0-43cf-8b35-cc2061ad8319 + name: test application + clientId: b6b159ce-07f4-4699-94b3-3e85b318852e + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + createdAt: 1717969830000 + description: Delete an Application. + "403": + description: Not allowed + "404": + description: Not found + operationId: deleteApplication + x-speakeasy-alias: deleteApplication + x-speakeasy-group: Applications + summary: Deletes an Application + parameters: + - name: applicationId + schema: + type: string + in: path + required: true + /applications/token: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationTokenRequestWithGrant" + application/x-www-form-urlencoded: + schema: + $ref: "#/components/schemas/ApplicationTokenRequestWithGrant" + examples: + Application Token Request Example: + value: + clientId: 0da998a2-0d7b-49c7-bb6e-9f7eb9cc68a0 + clientSecret: TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + required: true + tags: + - public_applications + - public + - Applications + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PublicAccessTokenResponse" + examples: + Application Creation Response Example: + value: + access_token: 
TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg== + description: Creates an Access Token. + "400": + description: Invalid data + "403": + description: Not allowed + operationId: createAccessToken + summary: Get an Access Token + x-speakeasy-alias: createAccessToken + x-speakeasy-group: Applications + security: [] + /jobs: + get: + tags: + - public_jobs + - public + - Jobs + parameters: + - name: connectionId + description: Filter the Jobs by connectionId. + schema: + format: UUID + type: string + in: query + required: false + - name: limit + description: + Set the limit on the number of Jobs returned. The default is + 20 Jobs. + schema: + format: int32 + default: 20 + maximum: 100 + minimum: 1 + type: integer + in: query + - name: offset + description: + Set the offset to start at when returning Jobs. The default is + 0. + schema: + format: int32 + default: 0 + minimum: 0 + type: integer + in: query + - name: jobType + description: Filter the Jobs by jobType. + schema: + $ref: "#/components/schemas/JobTypeEnum" + in: query + - name: workspaceIds + description: + The UUIDs of the workspaces you wish to list jobs for. Empty + list will retrieve all allowed workspaces. 
+ schema: + type: array + items: + format: uuid + type: string + in: query + required: false + - name: status + description: The Job status you want to filter by + schema: + $ref: "#/components/schemas/JobStatusEnum" + in: query + required: false + - name: createdAtStart + description: The start date to filter by + schema: + type: string + format: date-time + in: query + required: false + example: 1687450500000 + - name: createdAtEnd + description: The end date to filter by + schema: + type: string + format: date-time + in: query + required: false + example: 1687450500000 + - name: updatedAtStart + description: The start date to filter by + schema: + type: string + format: date-time + example: 1687450500000 + in: query + required: false + - name: updatedAtEnd + description: The end date to filter by + schema: + type: string + format: date-time + in: query + required: false + example: 1687450500000 + - name: orderBy + description: The field and method to use for ordering + schema: + type: string + pattern: \w+|(ASC|DESC) + in: query + required: false + example: updatedAt|DESC + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobsResponse" + examples: + Job List Response Example: + value: + next: https://api.airbyte.com/v1/jobs?limit=5&offset=10 + previous: https://api.airbyte.com/v1/jobs?limit=5&offset=0 + data: + - id: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + status: running + jobType: sync + description: List all the Jobs by connectionId. 
+ "403": + description: Not allowed + operationId: listJobs + summary: List Jobs by sync type + x-speakeasy-alias: listJobs + x-speakeasy-group: Jobs + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/JobCreateRequest" + examples: + Job Creation Request Example: + value: + connectionId: e735894a-e773-4938-969f-45f53957b75b + jobType: sync + required: true + tags: + - public_jobs + - public + - Jobs + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Creation Response Example: + value: + jobId: 1234 + status: running + jobType: sync + description: + Kicks off a new Job based on the JobType. The connectionId + is the resource that Job will be run for. + "400": + description: Invalid data + "403": + description: Not allowed + operationId: createJob + summary: Trigger a sync or reset job of a connection + x-speakeasy-alias: createJob + x-speakeasy-group: Jobs + /jobs/{jobId}: + get: + tags: + - public_jobs + - public + - Jobs + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Get Response Example: + value: + id: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + status: running + jobType: sync + description: Get a Job by the id in the path. + "403": + description: Not allowed + "404": + description: Not found + operationId: getJob + x-speakeasy-alias: getJob + x-speakeasy-group: Jobs + summary: Get Job status and details + delete: + tags: + - public_jobs + - public + - Jobs + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + description: Cancel a Job. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: cancelJob + x-speakeasy-alias: cancelJob + x-speakeasy-group: Jobs + summary: Cancel a running Job + parameters: + - name: jobId + schema: + format: int64 + type: integer + in: path + required: true + /sources: + get: + tags: + - public_sources + - public + - Sources + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourcesResponse" + description: Successful operation + "403": + description: Not allowed + "404": + description: Not found + operationId: listSources + x-speakeasy-alias: listSources + x-speakeasy-group: Sources + summary: List sources + parameters: + - name: workspaceIds + description: + The UUIDs of the workspaces you wish to list sources for. Empty + list will retrieve all allowed workspaces. + schema: + type: array + items: + format: uuid + type: string + example: df08f6b0-b364-4cc1-9b3f-96f5d2fccfb2,b0796797-de23-4fc7-a5e2-7e131314718c + in: query + required: false + - name: includeDeleted + description: Include deleted sources in the returned results. + schema: + default: false + type: boolean + in: query + required: false + - name: limit + description: + Set the limit on the number of sources returned. The default + is 20. + schema: + format: int32 + type: integer + minimum: 1 + maximum: 100 + default: 20 + in: query + - name: offset + description: + Set the offset to start at when returning sources. 
The default + is 0 + schema: + type: integer + format: int32 + minimum: 0 + default: 0 + in: query + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCreateRequest" + examples: + Source Creation Request Example: + value: + configuration: + airbyte_source_name: google-ads + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: My Source + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + tags: + - public_sources + - public + - Sources + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Creation Response Example: + value: + sourceId: 0c31738c-0b2d-4887-b506-e2cd1c39cc35 + description: Successful operation + "400": + description: Invalid data + "403": + description: Not allowed + operationId: createSource + x-speakeasy-alias: createSource + x-speakeasy-group: Sources + summary: Create a source + description: + Creates a source given a name, workspace id, and a json blob containing + the configuration for the source. + /sources/{sourceId}: + get: + tags: + - public_sources + - public + - Sources + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Get Response Example: + value: + sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: running + sourceType: postgres + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + description: Get a Source by the id in the path. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: getSource + x-speakeasy-alias: getSource + x-speakeasy-group: Sources + summary: Get Source details + patch: + tags: + - public_sources + - public + - Sources + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePatchRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: google-ads + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: My Source + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: running + sourceType: postgres + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + description: Update a Source + "403": + description: Not allowed + "404": + description: Not found + operationId: patchSource + x-speakeasy-alias: patchSource + x-speakeasy-group: Sources + summary: Update a Source + put: + tags: + - public_sources + - public + - Sources + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePutRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: google-ads + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: My Source + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: running + sourceType: postgres + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + description: Update a source and fully overwrite it + "403": + description: Not allowed + 
"404": + description: Not found + operationId: putSource + x-speakeasy-alias: putSource + x-speakeasy-group: Sources + summary: Update a Source and fully overwrite it + x-speakeasy-entity-operation: Source#update + delete: + tags: + - public_sources + - public + - Sources + responses: + "204": + description: The resource was deleted successfully + "403": + description: Not allowed + "404": + description: Not found + operationId: deleteSource + x-speakeasy-alias: deleteSource + x-speakeasy-group: Sources + summary: Delete a Source + parameters: + - name: sourceId + schema: + format: UUID + type: string + in: path + required: true + /destinations: + get: + tags: + - public_destinations + - public + - Destinations + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationsResponse" + description: Successful operation + "403": + description: Not allowed + "404": + description: Not found + operationId: listDestinations + x-speakeasy-alias: listDestinations + x-speakeasy-group: Destinations + summary: List destinations + parameters: + - name: workspaceIds + description: + The UUIDs of the workspaces you wish to list destinations for. + Empty list will retrieve all allowed workspaces. + schema: + type: array + items: + format: uuid + type: string + in: query + required: false + - name: includeDeleted + description: Include deleted destinations in the returned results. + schema: + default: false + type: boolean + in: query + required: false + - name: limit + description: + Set the limit on the number of destinations returned. The default + is 20. + schema: + format: int32 + type: integer + minimum: 1 + maximum: 100 + default: 20 + in: query + - name: offset + description: + Set the offset to start at when returning destinations. 
The default + is 0 + schema: + type: integer + format: int32 + minimum: 0 + default: 0 + in: query + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationCreateRequest" + examples: + Destination Creation Request Example: + value: + name: Postgres + workspaceId: 2155ae5a-de39-4808-af6a-16fe7b8b4ed2 + configuration: + airbyte_destination_name: postgres + port: 5432 + schema: public + ssl_mode: + mode: prefer + tunnel_method: + tunnel_method: NO_TUNNEL + host: localhost + database: postgres + username: postgres + password: test + tags: + - public_destinations + - public + - Destinations + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Creation Response Example: + value: + destinationId: af0c3c67-aa61-419f-8922-95b0bf840e86 + description: Successful operation + "400": + description: Invalid data + "403": + description: Not allowed + "404": + description: Not found + operationId: createDestination + x-speakeasy-alias: createDestination + x-speakeasy-group: Destinations + summary: Create a destination + description: + Creates a destination given a name, workspace id, and a json blob + containing the configuration for the source. + /destinations/{destinationId}: + get: + tags: + - public_destinations + - public + - Destinations + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Get Response Example: + value: + destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: My Destination + sourceType: postgres + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: Get a Destination by the id in the path. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: getDestination + x-speakeasy-alias: getDestination + x-speakeasy-group: Destinations + summary: Get Destination details + delete: + tags: + - public_destinations + - public + - Destinations + responses: + "204": + description: The resource was deleted successfully + "403": + description: Not allowed + "404": + description: Not found + operationId: deleteDestination + x-speakeasy-alias: deleteDestination + x-speakeasy-group: Destinations + summary: Delete a Destination + patch: + tags: + - public_destinations + - public + - Destinations + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPatchRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: My Destination + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: running + sourceType: postgres + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: Update a Destination + "403": + description: Not allowed + "404": + description: Not found + operationId: patchDestination + x-speakeasy-alias: patchDestination + x-speakeasy-group: Destinations + summary: Update a Destination + put: + tags: + - public_destinations + - public + - Destinations + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPutRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 
+ name: My Destination + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: running + sourceType: postgres + workspaceId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: Update a Destination and fully overwrite it + "403": + description: Not allowed + "404": + description: Not found + operationId: putDestination + x-speakeasy-alias: putDestination + x-speakeasy-group: Destinations + summary: Update a Destination and fully overwrite it + x-speakeasy-entity-operation: Destination#update + parameters: + - name: destinationId + schema: + format: UUID + type: string + in: path + required: true + /oauth/callback: + get: + tags: + - public_oauth + - public + - OAuth + parameters: + - name: queryParams + description: Query parameters. Should contain state and code. + required: false + schema: + type: object + additionalProperties: + type: string + in: query + responses: + "302": + description: + Redirect to the URL requested in the initiate call with the + created secret ID. + operationId: oauthCallback + x-speakeasy-alias: oauthCallback + x-speakeasy-group: OAuth + summary: Receive OAuth callbacks + description: Redirected to by identity providers after authentication. + security: [] + /sources/initiateOAuth: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/InitiateOauthRequest" + required: true + tags: + - public_sources + - public + - Sources + responses: + "200": + $ref: "#/components/responses/InitiateOauthResponse" + "400": + description: A field in the body has not been set appropriately. + "403": + description: API key is invalid. 
+ operationId: initiateOAuth + x-speakeasy-alias: initiateOAuth + x-speakeasy-group: Sources + summary: Initiate OAuth for a source + description: |- + Given a source ID, workspace ID, and redirect URL, initiates OAuth for the source. + + This returns a fully formed URL for performing user authentication against the relevant source identity provider (IdP). Once authentication has been completed, the IdP will redirect to an Airbyte endpoint which will save the access and refresh tokens off as a secret and return the secret ID to the redirect URL specified in the `secret_id` query string parameter. + + That secret ID can be used to create a source with credentials in place of actual tokens. + /connections: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionCreateRequest" + examples: + Connection Creation Request Example: + value: + sourceId: 95e66a59-8045-4307-9678-63bc3c9b8c93 + destinationId: e478de0d-a3a0-475c-b019-25f7dd29e281 + name: Postgres-to-Bigquery + required: true + tags: + - public_connections + - public + - Connections + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Creation Response Example: + value: + connectionId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + description: Successful operation + "400": + description: Invalid data + "403": + description: Not allowed + operationId: createConnection + x-speakeasy-alias: createConnection + x-speakeasy-group: Connections + summary: Create a connection + x-speakeasy-entity-operation: Connection#create + get: + tags: + - public_connections + - public + - Connections + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionsResponse" + description: Successful operation + "403": + description: Not allowed + "404": + description: Not found + operationId: listConnections + x-speakeasy-alias: listConnections + x-speakeasy-group: 
Connections + summary: List connections + x-speakeasy-entity-operation: Connection#list + parameters: + - name: workspaceIds + description: + The UUIDs of the workspaces you wish to list connections for. + Empty list will retrieve all allowed workspaces. + schema: + type: array + items: + format: uuid + type: string + in: query + required: false + - name: includeDeleted + description: Include deleted connections in the returned results. + schema: + default: false + type: boolean + in: query + required: false + - name: limit + description: + Set the limit on the number of Connections returned. The default + is 20. + schema: + format: int32 + type: integer + minimum: 1 + maximum: 100 + default: 20 + in: query + - name: offset + description: + Set the offset to start at when returning Connections. The default + is 0 + schema: + type: integer + format: int32 + minimum: 0 + default: 0 + in: query + /connections/{connectionId}: + get: + tags: + - public_connections + - public + - Connections + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Postgres To Snowflake + sourceId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + destinationId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + description: Get a Connection by the id in the path. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: getConnection + x-speakeasy-alias: getConnection + x-speakeasy-group: Connections + summary: Get Connection details + x-speakeasy-entity-operation: Connection#read + patch: + tags: + - public_connections + - public + - Connections + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionPatchRequest" + examples: + Connection Update Request Example: + value: + sourceId: 95e66a59-8045-4307-9678-63bc3c9b8c93 + destinationId: e478de0d-a3a0-475c-b019-25f7dd29e281 + name: Postgres-to-Bigquery + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Postgres To Snowflake + sourceId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + destinationId: 744cc0ed-7f05-4949-9e60-2a814f90c035 + description: Update a Connection by the id in the path. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: patchConnection + x-speakeasy-alias: patchConnection + x-speakeasy-group: Connections + summary: Update Connection details + x-speakeasy-entity-operation: Connection#update + delete: + tags: + - public_connections + - public + - Connections + responses: + "204": + description: The resource was deleted successfully + "403": + description: Not allowed + "404": + description: Not found + operationId: deleteConnection + x-speakeasy-alias: deleteConnection + x-speakeasy-group: Connections + summary: Delete a Connection + x-speakeasy-entity-operation: Connection#delete + parameters: + - name: connectionId + schema: + format: UUID + type: string + in: path + required: true + /streams: + get: + tags: + - public_streams + - public + - Streams + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/StreamPropertiesResponse" + description: + Get the available streams properties for a source/destination + pair. 
+ "400": + description: Required parameters are missing + "403": + description: Not allowed + "404": + description: Not found + operationId: getStreamProperties + x-speakeasy-alias: getStreamProperties + x-speakeasy-group: Streams + summary: Get stream properties + parameters: + - name: sourceId + description: ID of the source + schema: + format: UUID + type: string + in: query + required: true + - name: destinationId + description: ID of the destination + schema: + format: UUID + type: string + in: query + required: false + - name: ignoreCache + description: + If true pull the latest schema from the source, else pull from + cache (default false) + schema: + type: boolean + default: false + in: query + required: false + /workspaces: + get: + tags: + - public_workspaces + - public + - Workspaces + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspacesResponse" + description: Successful operation + "403": + description: Not allowed + "404": + description: Not found + operationId: listWorkspaces + x-speakeasy-alias: listWorkspaces + x-speakeasy-group: Workspaces + summary: List workspaces + x-speakeasy-entity-operation: Workspace#list + parameters: + - name: workspaceIds + description: + The UUIDs of the workspaces you wish to fetch. Empty list will + retrieve all allowed workspaces. + schema: + type: array + items: + format: uuid + type: string + in: query + required: false + - name: includeDeleted + description: Include deleted workspaces in the returned results. + schema: + default: false + type: boolean + in: query + required: false + - name: limit + description: + Set the limit on the number of workspaces returned. The default + is 20. + schema: + format: int32 + type: integer + minimum: 1 + maximum: 100 + default: 20 + in: query + - name: offset + description: + Set the offset to start at when returning workspaces. 
The default + is 0 + schema: + type: integer + format: int32 + minimum: 0 + default: 0 + in: query + post: + tags: + - public_workspaces + - public + - Workspaces + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceCreateRequest" + examples: + Workspace Creation Request Example: + value: + name: Company Workspace Name + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Creation Response Example: + value: + workspaceId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + description: Successful operation + "400": + description: Invalid data + "403": + description: Not allowed + operationId: createWorkspace + x-speakeasy-alias: createWorkspace + x-speakeasy-group: Workspaces + summary: Create a workspace + x-speakeasy-entity-operation: Workspace#create + /workspaces/{workspaceId}: + parameters: + - name: workspaceId + schema: + format: UUID + type: string + in: path + required: true + get: + tags: + - public_workspaces + - public + - Workspaces + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Get Response Example: + value: + workspaceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Acme Company + dataResidency: auto + description: Get a Workspace by the id in the path. 
+ "403": + description: Not allowed + "404": + description: Not found + operationId: getWorkspace + x-speakeasy-alias: getWorkspace + x-speakeasy-group: Workspaces + summary: Get Workspace details + x-speakeasy-entity-operation: Workspace#read + patch: + tags: + - public_workspaces + - public + - Workspaces + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceUpdateRequest" + examples: + Workspace Update Request Example: + value: + name: Company Workspace Name + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Update Response Example: + value: + workspaceId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + description: Successful operation + "400": + description: Invalid data + "403": + description: Not allowed + operationId: updateWorkspace + x-speakeasy-alias: updateWorkspace + x-speakeasy-group: Workspaces + summary: Update a workspace + x-speakeasy-entity-operation: Workspace#update + delete: + tags: + - public_workspaces + - public + - Workspaces + responses: + "204": + description: The resource was deleted successfully + "403": + description: Not allowed + "404": + description: Not found + operationId: deleteWorkspace + x-speakeasy-alias: deleteWorkspace + x-speakeasy-group: Workspaces + summary: Delete a Workspace + x-speakeasy-entity-operation: Workspace#delete + /workspaces/{workspaceId}/oauthCredentials: + put: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceOAuthCredentialsRequest" + required: true + tags: + - public_workspaces + - public + - Workspaces + responses: + "200": + description: OAuth credential override was successful. + "400": + description: A field in the body has not been set appropriately. + "403": + description: API key is invalid. 
+ operationId: createOrUpdateWorkspaceOAuthCredentials + x-speakeasy-alias: createOrUpdateWorkspaceOAuthCredentials + x-speakeasy-group: Workspaces + summary: Create OAuth override credentials for a workspace and source type. + description: |- + Create/update a set of OAuth credentials to override the Airbyte-provided OAuth credentials used for source/destination OAuth. + In order to determine what the credential configuration needs to be, please see the connector specification of the relevant source/destination. + parameters: + - name: workspaceId + schema: + format: UUID + type: string + in: path + required: true + /permissions/{permissionId}: + parameters: + - name: permissionId + schema: + format: UUID + type: string + in: path + required: true + get: + tags: + - public_permissions + - public + - Permissions + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: Get a Permission by the id in the path. + "403": + description: Not allowed + "404": + description: Not found + "422": + description: Data issue + operationId: getPermission + summary: Get Permission details + x-speakeasy-alias: getPermission + x-speakeasy-group: Permissions + x-speakeasy-entity-operation: Permission#read + patch: + tags: + - public_permissions + - public + - Permissions + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionUpdateRequest" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: Successful updated + "400": + description: Invalid data + "403": + description: Not allowed + "404": + description: Not found + "422": + description: Data issue + operationId: updatePermission + summary: Update a permission + x-speakeasy-alias: updatePermission + x-speakeasy-group: Permissions + x-speakeasy-entity-operation: Permission#update + delete: + tags: + - public_permissions + - public + - 
Permissions + responses: + "204": + description: The resource was deleted successfully + "403": + description: Not allowed + "404": + description: Not found + "422": + description: Data issue + operationId: deletePermission + x-speakeasy-alias: deletePermission + x-speakeasy-group: Permissions + summary: Delete a Permission + x-speakeasy-entity-operation: Permission#delete + /permissions: + get: + tags: + - public_permissions + - public + - Permissions + parameters: + - name: userId + description: User Id in permission. + schema: + format: UUID + type: string + in: query + required: false + - name: organizationId + description: + This is required if you want to read someone else's permissions, + and you should have organization admin or a higher role. + schema: + format: UUID + type: string + in: query + required: false + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionsResponse" + description: List Permissions. + "403": + description: Not allowed + "404": + description: Not found + operationId: listPermissions + x-speakeasy-alias: listPermissions + x-speakeasy-group: Permissions + summary: List Permissions by user id + post: + tags: + - public_permissions + - public + - Permissions + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionCreateRequest" + examples: + Permission Creation Request Example: + value: + permissionType: workspace_admin + userId: 7d08fd6c-531e-4a00-937e-3d355f253e63 + workspaceId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + examples: + Permission Creation Response Example: + value: + permissionId: 9924bcd0-99be-453d-ba47-c2c9766f7da5 + permissionType: workspace_admin + userId: 7d08fd6c-531e-4a00-937e-3d355f253e63 + description: Successful operation + "400": + description: Invalid data + "403": + description: Not allowed + 
operationId: createPermission + x-speakeasy-alias: createPermission + x-speakeasy-group: Permissions + summary: Create a permission + x-speakeasy-entity-operation: Permission#create + /organizations: + get: + tags: + - public_organizations + - public + - Organizations + summary: List all organizations for a user + description: Lists users organizations. + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationsResponse" + description: List user's organizations. + "403": + description: Not allowed + "404": + description: Not found + operationId: listOrganizationsForUser + x-speakeasy-alias: listOrganizationsForUser + x-speakeasy-group: Organizations + /users: + get: + tags: + - public_users + - public + - Users + summary: List all users within an organization + description: + Organization Admin user can list all users within the same organization. + Also provide filtering on a list of user IDs or/and a list of user emails. + parameters: + - in: query + name: organizationId + schema: + type: string + format: UUID + required: true + - in: query + name: ids + schema: + type: array + items: + type: string + format: UUID + description: List of user IDs to filter by + - in: query + name: emails + schema: + type: array + items: + type: string + format: email + description: List of user emails to filter by + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/UsersResponse" + description: List Users. + "403": + description: Not allowed + "404": + description: Not found + operationId: listUsersWithinAnOrganization + x-speakeasy-alias: listUsersWithinAnOrganization + x-speakeasy-group: Users +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + Response from the initiate OAuth call should be an object with + a single property which will be the `redirect_url`. 
If a user is redirected + to this URL, they'll be prompted by the identity provider to authenticate. + x-speakeasy-component: true + schemas: + WorkspaceId: + type: string + format: uuid + x-speakeasy-component: true + SourceConfiguration: + description: + The values required to configure the source. The schema for this + must match the schema returned by source_definition_specifications/get for the + source. + example: + user: charles + x-speakeasy-component: true + DestinationConfiguration: + description: + The values required to configure the destination. The schema for + this must match the schema returned by destination_definition_specifications/get + for the destinationDefinition. + example: + user: charles + x-speakeasy-component: true + OrganizationId: + type: string + format: uuid + x-speakeasy-component: true + PermissionType: + type: string + description: Describes what actions/endpoints the permission entitles to + enum: + - instance_admin + - organization_admin + - organization_editor + - organization_reader + - organization_member + - workspace_owner + - workspace_admin + - workspace_editor + - workspace_reader + x-speakeasy-component: true + PublicPermissionType: + type: string + description: + Subset of `PermissionType` (removing `instance_admin`), could be + used in public-api. 
+ enum: + - organization_admin + - organization_editor + - organization_reader + - organization_member + - workspace_admin + - workspace_editor + - workspace_reader + x-speakeasy-component: true + UserId: + type: string + description: Internal Airbyte user ID + format: uuid + x-speakeasy-component: true + AuthProvider: + type: string + description: Auth Provider + default: airbyte + enum: + - airbyte + - google_identity_platform + - keycloak + x-speakeasy-component: true + UserStatus: + type: string + description: user status + enum: + - invited + - registered + - disabled + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: object + description: + Path to a field/column/property in a stream to be selected. For + example, if the field to be selected is a database column called "foo", this + will be ["foo"]. Use multiple path elements for nested schemas. + properties: + fieldPath: + type: array + items: + type: string + x-speakeasy-component: true + SelectedFields: + description: Paths to the fields that will be included in the configured catalog. + type: array + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + The values required to configure OAuth flows. The schema for this + must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification` + schema. 
+ x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - name + type: object + properties: + name: + type: string + x-speakeasy-component: true + ApplicationReadList: + required: + - applications + type: object + properties: + applications: + type: array + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - id + - name + - clientId + - clientSecret + - createdAt + type: object + properties: + id: + type: string + name: + type: string + clientId: + type: string + clientSecret: + type: string + createdAt: + type: integer + format: int64 + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - client_id + - client_secret + - grant_type + type: object + properties: + client_id: + type: string + client_secret: + type: string + grant-type: + enum: + - client_credentials + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - access_token + - token_type + - expires_in + type: object + properties: + access_token: + type: string + token_type: + enum: + - Bearer + expires_in: + type: integer + format: int64 + x-speakeasy-component: true + RedirectUrlResponse: + title: Root Type for RedirectUrlResponse + description: "" + type: object + properties: + redirectUrl: + format: url + type: string + example: + redirectUrl: https://example.com + x-speakeasy-component: true + JobResponse: + title: Root Type for JobResponse + description: Provides details of a single job. 
+ required: + - jobId + - status + - jobType + - startTime + - connectionId + type: object + properties: + jobId: + format: int64 + type: integer + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: string + connectionId: + format: UUID + type: string + lastUpdatedAt: + type: string + duration: + description: Duration of a sync in ISO_8601 format + type: string + bytesSynced: + format: int64 + type: integer + rowsSynced: + format: int64 + type: integer + example: + id: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + status: running + jobType: sync + startTime: 2023-03-25T01:30:50Z + duration: PT8H6M12S + x-speakeasy-component: true + JobsResponse: + title: Root Type for JobsResponse + description: "" + required: + - data + type: object + properties: + previous: + type: string + next: + type: string + data: + type: array + items: + $ref: "#/components/schemas/JobResponse" + example: + next: https://api.airbyte.com/v1/jobs?limit=5&offset=10 + previous: https://api.airbyte.com/v1/jobs?limit=5&offset=0 + data: + - id: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + status: running + jobType: sync + startTime: 2023-03-25T01:30:50Z + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - sourceId + - destinationId + type: object + properties: + name: + description: Optional name of the connection + type: string + sourceId: + format: uuid + type: string + destinationId: + format: uuid + type: string + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: string + description: + Used when namespaceDefinition is 'custom_format'. If blank + then behaves like namespaceDefinition = 'destination'. 
If "${SOURCE_NAMESPACE}" + then behaves like namespaceDefinition = 'source'. + default: null + example: ${SOURCE_NAMESPACE} + prefix: + type: string + description: + Prefix that will be prepended to the name of each stream when + it is written to the destination (ex. “airbyte_” causes “projects” => + “airbyte_projects”). + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: Connection + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: object + properties: + name: + description: Optional name of the connection + type: string + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: string + description: + Used when namespaceDefinition is 'custom_format'. If blank + then behaves like namespaceDefinition = 'destination'. If "${SOURCE_NAMESPACE}" + then behaves like namespaceDefinition = 'source'. + default: null + example: ${SOURCE_NAMESPACE} + prefix: + type: string + description: + Prefix that will be prepended to the name of each stream when + it is written to the destination (ex. “airbyte_” causes “projects” => + “airbyte_projects”). + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: Connection + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: Root Type for JobCreate + description: + Creates a new Job from the configuration provided in the request + body. 
+ required: + - jobType + - connectionId + type: object + properties: + connectionId: + format: UUID + type: string + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + jobType: sync + x-speakeasy-component: true + JobStatusEnum: + enum: + - pending + - running + - incomplete + - failed + - succeeded + - cancelled + type: string + x-speakeasy-component: true + JobTypeEnum: + description: + Enum that describes the different types of jobs that the platform + runs. + enum: + - sync + - reset + - refresh + - clear + type: string + x-speakeasy-component: true + SourceCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: + The UUID of the connector definition. One of configuration.sourceType + or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: Optional secretID obtained through the OAuth redirect flow. 
+ type: string + x-implements: io.airbyte.api.common.ConfigurableActor + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - name + - configuration + type: object + properties: + name: + type: string + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: io.airbyte.api.common.ConfigurableActor + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: object + properties: + name: + type: string + example: My source + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: Optional secretID obtained through the OAuth redirect flow. + type: string + x-implements: io.airbyte.api.common.ConfigurableActor + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + InitiateOauthRequest: + title: Root Type for initiate-oauth-post-body + required: + - redirectUrl + - workspaceId + type: object + properties: + name: + description: + The name of the source to authenticate to. Deprecated - use + sourceType instead. + type: string + sourceType: + description: The name of the source to authenticate to + type: string + redirectUrl: + description: + The URL to redirect the user to with the OAuth secret stored + in the secret_id query string parameter after authentication is complete. + type: string + workspaceId: + format: uuid + description: + The workspace to create the secret and eventually the full + source. + type: string + oAuthInputConfiguration: + $ref: "#/components/schemas/OAuthInputConfiguration" + description: Input configuration for OAuth required by some sources. 
+ example: + redirectUrl: https://cloud.airbyte.io/v1/api/oauth/callback + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + destinationId: 3d93b16c-ff5f-421c-8908-5a3c82088f14 + x-speakeasy-component: true + WorkspaceOAuthCredentialsRequest: + title: Root Type for WorkspaceOAuthCredentials + description: POST body for creating/updating workspace level OAuth credentials + required: + - actorType + - name + - configuration + type: object + properties: + actorType: + $ref: "#/components/schemas/ActorTypeEnum" + name: + type: string + description: The name of the source i.e. google-ads + configuration: + $ref: "#/components/schemas/OAuthCredentialsConfiguration" + x-speakeasy-component: true + OAuthCredentialsConfiguration: + description: + The configuration for this source/destination based on the OAuth + section of the relevant specification. + type: object + example: + credentials: + client_id: 871d9b60-11d1-44cb-8c92-c246d53bf87e + client_secret: shhhhhh + x-speakeasy-component: true + ConnectionResponse: + title: Root Type for ConnectionResponse + description: Provides details of a single connection. 
+ type: object + required: + - connectionId + - name + - sourceId + - destinationId + - workspaceId + - status + - schedule + - dataResidency + - configurations + properties: + connectionId: + format: UUID + type: string + name: + type: string + sourceId: + format: UUID + type: string + destinationId: + format: UUID + type: string + workspaceId: + format: UUID + type: string + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: string + prefix: + type: string + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: Connection + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + schedule for when the connection should run, per the schedule + type + type: object + required: + - scheduleType + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: string + x-speakeasy-component: true + ScheduleTypeEnum: + type: string + enum: + - manual + - cron + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + schedule for when the connection should run, per the schedule + type + type: object + required: + - scheduleType + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: string + basicTiming: + type: string + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: string + enum: + - manual + - cron + - basic + x-speakeasy-component: true + GeographyEnum: + type: string + enum: + - auto + - us + - eu + default: auto + x-speakeasy-component: true + GeographyEnumNoDefault: + type: 
string + enum: + - auto + - us + - eu + x-speakeasy-component: true + ConnectionStatusEnum: + type: string + enum: + - active + - inactive + - deprecated + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: string + description: Define the location where the data will be stored in the destination + enum: + - source + - destination + - custom_format + default: destination + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: string + description: + Set how Airbyte handles syncs when it detects a non-breaking schema + change in the source + enum: + - ignore + - disable_connection + - propagate_columns + - propagate_fully + default: ignore + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: string + description: Define the location where the data will be stored in the destination + enum: + - source + - destination + - custom_format + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: string + description: + Set how Airbyte handles syncs when it detects a non-breaking schema + change in the source + enum: + - ignore + - disable_connection + - propagate_columns + - propagate_fully + x-speakeasy-component: true + DestinationResponse: + title: Root Type for DestinationResponse + description: Provides details of a single destination. + type: object + required: + - destinationId + - name + - destinationType + - workspaceId + - configuration + properties: + destinationId: + format: UUID + type: string + name: + type: string + destinationType: + type: string + workspaceId: + format: UUID + type: string + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Analytics Team Postgres + destinationType: postgres + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + x-speakeasy-component: true + SourceResponse: + title: Root Type for SourceResponse + description: Provides details of a single source. 
+ type: object + required: + - sourceId + - name + - sourceType + - workspaceId + - configuration + properties: + sourceId: + format: UUID + type: string + name: + type: string + sourceType: + type: string + workspaceId: + format: UUID + type: string + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Analytics Team Postgres + sourceType: postgres + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + x-speakeasy-component: true + DestinationCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: + The UUID of the connector definition. One of configuration.destinationType + or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: io.airbyte.api.common.ConfigurableActor + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: object + properties: + name: + type: string + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: io.airbyte.api.common.ConfigurableActor + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - name + - configuration + type: object + properties: + name: + type: string + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: io.airbyte.api.common.ConfigurableActor + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - name + type: object + properties: + name: + description: Name of the workspace + type: string + 
organizationId: + description: ID of organization to add workspace to. + format: uuid + type: string + x-speakeasy-entity: Workspace + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - name + type: object + properties: + name: + description: Name of the workspace + type: string + x-speakeasy-entity: Workspace + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: Root Type for WorkspaceResponse + description: Provides details of a single workspace. + type: object + required: + - workspaceId + - name + - dataResidency + properties: + workspaceId: + format: UUID + type: string + name: + type: string + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: Workspace + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: Root Type for UserResponse + description: Provides details of a single user in an organization. 
+ type: object + required: + - id + - name + - email + properties: + name: + description: Name of the user + type: string + id: + $ref: "#/components/schemas/UserId" + email: + type: string + format: email + x-speakeasy-entity: User + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: Root Type for UsersResponse + description: List/Array of multiple users in an organization + required: + - data + type: object + properties: + data: + type: array + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: User + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - permissionType + - userId + type: object + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - permissionType + type: object + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: Root Type for PermissionResponse + description: Provides details of a single permission. 
+ type: object + required: + - permissionId + - permissionType + - userId + properties: + permissionId: + type: string + format: uuid + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: Scope of a single permission, e.g. workspace, organization + type: string + enum: + - workspace + - organization + - none + x-speakeasy-component: true + PermissionResponseRead: + title: Root type for PermissionResponseRead + description: Reformat PermissionResponse with permission scope + type: object + required: + - permissionId + - permissionType + - userId + - scope + - scopeId + properties: + permissionId: + type: string + format: uuid + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: string + format: uuid + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: Root Type for PermissionsResponse + description: List/Array of multiple permissions + required: + - data + type: object + properties: + data: + type: array + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: Root Type for OrganizationResponse + description: Provides details of a single organization for a user. 
+ type: object + required: + - organizationId + - organizationName + - email + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: string + email: + type: string + format: email + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: Root Type for OrganizationsResponse + description: List/Array of multiple organizations. + required: + - data + type: object + properties: + data: + type: array + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: Root Type for ConnectionsResponse + description: "" + required: + - data + type: object + properties: + previous: + type: string + next: + type: string + data: + type: array + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: https://api.airbyte.com/v1/connections?limit=5&offset=10 + previous: https://api.airbyte.com/v1/connections?limit=5&offset=0 + data: + - name: test-connection + - connection_id: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + - sourceId: 49237019-645d-47d4-b45b-5eddf97775ce + - destinationId: al312fs-0ab1-4f72-9ed7-0b8fc27c5826 + - schedule: + scheduleType: manual + - status: active + - dataResidency: auto + x-speakeasy-component: true + SourcesResponse: + title: Root Type for SourcesResponse + description: "" + required: + - data + type: object + properties: + previous: + type: string + next: + type: string + data: + type: array + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: https://api.airbyte.com/v1/sources?limit=5&offset=10 + previous: https://api.airbyte.com/v1/sources?limit=5&offset=0 + data: + sourceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Analytics Team Postgres + sourceType: postgres + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + x-speakeasy-component: true + DestinationsResponse: + title: Root Type for DestinationsResponse + description: "" + 
required: + - data + type: object + properties: + previous: + type: string + next: + type: string + data: + type: array + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: https://api.airbyte.com/v1/destinations?limit=5&offset=10 + previous: https://api.airbyte.com/v1/destinations?limit=5&offset=0 + data: + destinationId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Analytics Team Postgres + destinationType: postgres + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + x-speakeasy-component: true + WorkspacesResponse: + title: Root Type for WorkspacesResponse + description: "" + required: + - data + type: object + properties: + previous: + type: string + next: + type: string + data: + type: array + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: https://api.airbyte.com/v1/workspaces?limit=5&offset=10 + previous: https://api.airbyte.com/v1/workspaces?limit=5&offset=0 + data: + workspaceId: 18dccc91-0ab1-4f72-9ed7-0b8fc27c5826 + name: Acme Company + dataResidency: auto + x-speakeasy-component: true + StreamConfiguration: + description: Configurations for a single stream. + type: object + required: + - name + properties: + name: + type: string + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + Path to the field that will be used to determine if a record + is new or modified since the last sync. This field is REQUIRED if `sync_mode` + is `incremental` unless there is a default. + type: array + items: + type: string + primaryKey: + description: + Paths to the fields that will be used as primary key. This + field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it is + already supplied by the source schema. + type: array + items: + type: array + items: + type: string + selectedFields: + description: + By default (if not provided in the request) all fields will + be synced. Otherwise, only the fields in this list will be synced. 
+ $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: A list of configured stream options for a connection. + type: object + properties: + streams: + type: array + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: A list of stream properties. + type: array + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: The stream properties associated with a connection. + type: object + properties: + streamName: + type: string + syncModes: + type: array + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: array + items: + type: string + sourceDefinedCursorField: + type: boolean + sourceDefinedPrimaryKey: + type: array + items: + type: array + items: + type: string + propertyFields: + type: array + items: + type: array + items: + type: string + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - full_refresh_overwrite + - full_refresh_append + - incremental_append + - incremental_deduped_history + x-speakeasy-component: true + ActorTypeEnum: + description: Whether you're setting this override for a source or destination + enum: + - source + - destination + x-speakeasy-component: true diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml new file mode 100644 index 00000000000..8753442ca43 --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_applications.yaml @@ -0,0 +1,1269 @@ +--- +openapi: "3.1.0" +info: + title: "Applications" + version: "1.0.0" + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." 
+servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /applications: + get: + tags: + - "public_applications" + - "public" + - "Applications" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationReadList" + examples: + Application List Response Example: + value: + applications: + - id: "780d5bd9-a8a0-43cf-8b35-cc2061ad8319" + name: "test application" + clientId: "b6b159ce-07f4-4699-94b3-3e85b318852e" + clientSecret: "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==" + createdAt: 1717969830000 + description: "List all Applications a User has permission to view." + "403": + description: "Not allowed" + operationId: "listApplications" + summary: "List Applications" + x-speakeasy-alias: "listApplications" + x-speakeasy-group: "Applications" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationCreate" + examples: + Application Creation Request Example: + value: + name: "test application" + required: true + tags: + - "public_applications" + - "public" + - "Applications" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Creation Response Example: + value: + id: "780d5bd9-a8a0-43cf-8b35-cc2061ad8319" + name: "test application" + clientId: "b6b159ce-07f4-4699-94b3-3e85b318852e" + clientSecret: "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==" + createdAt: 1717969830000 + description: "Creates a new Application." 
+ "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createApplication" + summary: "Create an Application" + x-speakeasy-alias: "createApplication" + x-speakeasy-group: "Applications" + /applications/{applicationId}: + get: + tags: + - "public_applications" + - "public" + - "Applications" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Get Response Example: + value: + id: "780d5bd9-a8a0-43cf-8b35-cc2061ad8319" + name: "test application" + clientId: "b6b159ce-07f4-4699-94b3-3e85b318852e" + clientSecret: "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==" + createdAt: 1717969830000 + description: "Get an Application by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getApplication" + x-speakeasy-alias: "getApplication" + x-speakeasy-group: "Applications" + summary: "Get an Application detail" + delete: + tags: + - "public_applications" + - "public" + - "Applications" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationRead" + examples: + Application Delete Response Example: + value: + id: "780d5bd9-a8a0-43cf-8b35-cc2061ad8319" + name: "test application" + clientId: "b6b159ce-07f4-4699-94b3-3e85b318852e" + clientSecret: "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==" + createdAt: 1717969830000 + description: "Delete an Application." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteApplication" + x-speakeasy-alias: "deleteApplication" + x-speakeasy-group: "Applications" + summary: "Deletes an Application" + parameters: + - name: "applicationId" + schema: + type: "string" + in: "path" + required: true + /applications/token: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ApplicationTokenRequestWithGrant" + application/x-www-form-urlencoded: + schema: + $ref: "#/components/schemas/ApplicationTokenRequestWithGrant" + examples: + Application Token Request Example: + value: + clientId: "0da998a2-0d7b-49c7-bb6e-9f7eb9cc68a0" + clientSecret: "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==" + required: true + tags: + - "public_applications" + - "public" + - "Applications" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PublicAccessTokenResponse" + examples: + Application Creation Response Example: + value: + access_token: "TG9yZW0gaXBzdW0gZG9sb3Igc2l0IGFtZXQsIGNvbnNlY3RldHVyIGFkaXBpc2NpbmcgZWxpdC4gU2VkIGNvbmRpbWVudHVtIG5lYyBsaWJlcm8gc2VkIGxvYm9ydGlzLg==" + description: "Creates an Access Token." + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createAccessToken" + summary: "Get an Access Token" + x-speakeasy-alias: "createAccessToken" + x-speakeasy-group: "Applications" + security: [] +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." 
+ x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml new file mode 100644 index 00000000000..6fc2ab0c44f --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_connections.yaml @@ -0,0 +1,1292 @@ +--- +openapi: "3.1.0" +info: + title: "Connections" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /connections: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionCreateRequest" + examples: + Connection Creation Request Example: + value: + sourceId: "95e66a59-8045-4307-9678-63bc3c9b8c93" + destinationId: "e478de0d-a3a0-475c-b019-25f7dd29e281" + name: "Postgres-to-Bigquery" + required: true + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Creation Response Example: + value: + connectionId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createConnection" + x-speakeasy-alias: "createConnection" + x-speakeasy-group: "Connections" + summary: "Create a connection" + x-speakeasy-entity-operation: "Connection#create" + get: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionsResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listConnections" + 
x-speakeasy-alias: "listConnections" + x-speakeasy-group: "Connections" + summary: "List connections" + x-speakeasy-entity-operation: "Connection#list" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list connections for.\ + \ Empty list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted connections in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of Connections returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning Connections. The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + /connections/{connectionId}: + get: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Postgres To Snowflake" + sourceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + destinationId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Get a Connection by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getConnection" + x-speakeasy-alias: "getConnection" + x-speakeasy-group: "Connections" + summary: "Get Connection details" + x-speakeasy-entity-operation: "Connection#read" + patch: + tags: + - "public_connections" + - "public" + - "Connections" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionPatchRequest" + examples: + Connection Update Request Example: + value: + sourceId: "95e66a59-8045-4307-9678-63bc3c9b8c93" + destinationId: "e478de0d-a3a0-475c-b019-25f7dd29e281" + name: "Postgres-to-Bigquery" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Postgres To Snowflake" + sourceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + destinationId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a Connection by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchConnection" + x-speakeasy-alias: "patchConnection" + x-speakeasy-group: "Connections" + summary: "Update Connection details" + x-speakeasy-entity-operation: "Connection#update" + delete: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteConnection" + x-speakeasy-alias: "deleteConnection" + x-speakeasy-group: "Connections" + summary: "Delete a Connection" + x-speakeasy-entity-operation: "Connection#delete" + parameters: + - name: "connectionId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." + x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." 
+ enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." + type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." 
+ x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single job." 
+ required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml new file mode 100644 index 00000000000..c5ca933d78a --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_destinations.yaml @@ -0,0 +1,19739 @@ +--- +openapi: "3.1.0" +info: + title: "Destinations" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /destinations: + get: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationsResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listDestinations" + x-speakeasy-alias: "listDestinations" + x-speakeasy-group: "Destinations" + summary: "List destinations" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list destinations for.\ + \ Empty list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted destinations in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of destinations returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning destinations. 
The\ + \ default is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationCreateRequest" + examples: + Destination Creation Request Example: + value: + name: "Postgres" + workspaceId: "2155ae5a-de39-4808-af6a-16fe7b8b4ed2" + configuration: + airbyte_destination_name: "postgres" + port: 5432 + schema: "public" + ssl_mode: + mode: "prefer" + tunnel_method: + tunnel_method: "NO_TUNNEL" + host: "localhost" + database: "postgres" + username: "postgres" + password: "test" + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Creation Response Example: + value: + destinationId: "af0c3c67-aa61-419f-8922-95b0bf840e86" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "createDestination" + x-speakeasy-alias: "createDestination" + x-speakeasy-group: "Destinations" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob\ + \ containing the configuration for the source." + /destinations/{destinationId}: + get: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Get Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "My Destination" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestination" + x-speakeasy-alias: "getDestination" + x-speakeasy-group: "Destinations" + summary: "Get Destination details" + delete: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestination" + x-speakeasy-alias: "deleteDestination" + x-speakeasy-group: "Destinations" + summary: "Delete a Destination" + patch: + tags: + - "public_destinations" + - "public" + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPatchRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Destination" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Update a Destination" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchDestination" + x-speakeasy-alias: "patchDestination" + x-speakeasy-group: "Destinations" + summary: "Update a Destination" + put: + tags: + - "public_destinations" + - "public" + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPutRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + 
customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Destination" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Update a Destination and fully overwrite it" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestination" + x-speakeasy-alias: "putDestination" + x-speakeasy-group: "Destinations" + summary: "Update a Destination and fully overwrite it" + x-speakeasy-entity-operation: "Destination#update" + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." 
+ x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + destination-gcs: + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + - "format" + - "destinationType" + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + order: 1 + type: "string" + description: + "You can find the bucket name in the App Engine Admin console\ + \ Application 
Settings page, under the label Google Cloud Storage Bucket.\ + \ Read more here." + examples: + - "airbyte_sync" + gcs_bucket_path: + title: "GCS Bucket Path" + description: + "GCS Bucket Path string Subdirectory under the above bucket\ + \ to sync the data into." + order: 2 + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + order: 3 + default: "us" + description: + "Select a Region of the GCS Bucket. Read more here." + enum: + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "southamerica-east1" + - "southamerica-west1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "asia" + - "eu" + - "us" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Authentication" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read more\ + \ here." + type: "object" + order: 0 + oneOf: + - title: "HMAC Key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "When linked to a service account, this ID is 61 characters\ + \ long; when linked to a user account, it is 24 characters long.\ + \ Read more here." 
+ title: "Access ID" + airbyte_secret: true + order: 0 + examples: + - "1234567890abcdefghij1234" + x-speakeasy-param-sensitive: true + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string. Read more here." + title: "Secret" + airbyte_secret: true + order: 1 + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + x-speakeasy-param-sensitive: true + format: + title: "Output Format" + type: "object" + description: + "Output data format. One of the following formats must be selected\ + \ - AVRO format, PARQUET format, CSV format, or JSONL format." + order: 4 + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "The presets 0-3 are fast presets with medium compression.\ + \ The presets 4-6 are fairly slow presets with high compression.\ + \ The default preset is 6. 
The presets 7-9 are like the preset\ + \ 6 but use bigger dictionaries and have higher compressor\ + \ and decompressor memory requirements. Unless the uncompressed\ + \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\ + \ waste of memory to use the presets 7, 8, or 9, respectively.\ + \ Read more here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization" + description: + "Whether the input JSON data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." 
+ oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + default: "UNCOMPRESSED" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." 
+ type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + destinationType: + title: "gcs" + const: "gcs" + enum: + - "gcs" + order: 0 + type: "string" + destination-gcs-update: + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + - "format" + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + order: 1 + type: "string" + description: + "You can find the bucket name in the App Engine Admin console\ + \ Application Settings page, under the label Google Cloud Storage Bucket.\ + \ Read more here." + examples: + - "airbyte_sync" + gcs_bucket_path: + title: "GCS Bucket Path" + description: + "GCS Bucket Path string Subdirectory under the above bucket\ + \ to sync the data into." + order: 2 + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + order: 3 + default: "us" + description: + "Select a Region of the GCS Bucket. Read more here." 
+ enum: + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "southamerica-east1" + - "southamerica-west1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "asia" + - "eu" + - "us" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Authentication" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read more\ + \ here." + type: "object" + order: 0 + oneOf: + - title: "HMAC Key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "When linked to a service account, this ID is 61 characters\ + \ long; when linked to a user account, it is 24 characters long.\ + \ Read more here." + title: "Access ID" + airbyte_secret: true + order: 0 + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string. Read more here." + title: "Secret" + airbyte_secret: true + order: 1 + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + format: + title: "Output Format" + type: "object" + description: + "Output data format. One of the following formats must be selected\ + \ - AVRO format, PARQUET format, CSV format, or JSONL format." 
+ order: 4 + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "The presets 0-3 are fast presets with medium compression.\ + \ The presets 4-6 are fairly slow presets with high compression.\ + \ The default preset is 6. The presets 7-9 are like the preset\ + \ 6 but use bigger dictionaries and have higher compressor\ + \ and decompressor memory requirements. Unless the uncompressed\ + \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\ + \ waste of memory to use the presets 7, 8, or 9, respectively.\ + \ Read more here for details." 
+ type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization" + description: + "Whether the input JSON data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." 
+ oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + default: "UNCOMPRESSED" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." 
+ type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + destination-clickhouse: + title: "ClickHouse Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "HTTP port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: false + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "clickhouse" + const: "clickhouse" + enum: + - "clickhouse" + order: 0 + type: "string" + destination-clickhouse-update: + title: "ClickHouse Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "HTTP port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: false + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-mssql: + title: "MS SQL Server Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "The host name of the MSSQL database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the MSSQL database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1433 + examples: + - "1433" + order: 1 + database: + title: "DB Name" + description: "The name of the MSSQL database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with this username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used to communicate with the\ + \ database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "The data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + destination-mssql-update: + title: "MS SQL Server Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "The host name of the MSSQL database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the MSSQL database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1433 + examples: + - "1433" + order: 1 + database: + title: "DB Name" + description: "The name of the MSSQL database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with this username." 
+ type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used to communicate with the\ + \ database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "The data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." 
+ order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-mysql: + title: "MySQL Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." 
+ type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 6 + raw_data_schema: + type: "string" + description: "The database to write raw tables into" + title: "Raw table database (defaults to airbyte_internal)" + order: 7 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mysql" + const: "mysql" + enum: + - "mysql" + order: 0 + type: "string" + destination-mysql-update: + title: "MySQL Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + raw_data_schema: + type: "string" + description: "The database to write raw tables into" + title: "Raw table database (defaults to airbyte_internal)" + order: 7 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-pubsub: + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + - "ordering_enabled" + - "batching_enabled" + - "destinationType" + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub." + title: "Project ID" + topic_id: + type: "string" + description: "The PubSub topic ID in the given GCP project ID." + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. 
Check out the\ + \ docs if you need help generating this key." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + ordering_enabled: + title: "Message Ordering Enabled" + description: + "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\ + \ of stream" + type: "boolean" + default: false + batching_enabled: + type: "boolean" + title: "Message Batching Enabled" + description: + "If TRUE messages will be buffered instead of sending them\ + \ one by one" + default: false + batching_delay_threshold: + type: "integer" + title: "Message Batching: Delay Threshold" + description: "Number of ms before the buffer is flushed" + default: 1 + minimum: 1 + batching_element_count_threshold: + type: "integer" + title: "Message Batching: Element Count Threshold" + description: "Number of messages before the buffer is flushed" + default: 1 + minimum: 1 + batching_request_bytes_threshold: + type: "integer" + title: "Message Batching: Request Bytes Threshold" + description: "Number of bytes before the buffer is flushed" + default: 1 + minimum: 1 + destinationType: + title: "pubsub" + const: "pubsub" + enum: + - "pubsub" + order: 0 + type: "string" + destination-pubsub-update: + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + - "ordering_enabled" + - "batching_enabled" + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub." + title: "Project ID" + topic_id: + type: "string" + description: "The PubSub topic ID in the given GCP project ID." + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key." 
+ title: "Credentials JSON" + airbyte_secret: true + ordering_enabled: + title: "Message Ordering Enabled" + description: + "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\ + \ of stream" + type: "boolean" + default: false + batching_enabled: + type: "boolean" + title: "Message Batching Enabled" + description: + "If TRUE messages will be buffered instead of sending them\ + \ one by one" + default: false + batching_delay_threshold: + type: "integer" + title: "Message Batching: Delay Threshold" + description: "Number of ms before the buffer is flushed" + default: 1 + minimum: 1 + batching_element_count_threshold: + type: "integer" + title: "Message Batching: Element Count Threshold" + description: "Number of messages before the buffer is flushed" + default: 1 + minimum: 1 + batching_request_bytes_threshold: + type: "integer" + title: "Message Batching: Request Bytes Threshold" + description: "Number of bytes before the buffer is flushed" + default: 1 + minimum: 1 + destination-weaviate: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "No external embedding" + type: "object" + properties: + mode: + title: "Mode" + default: "no_embedding" + const: "no_embedding" + enum: + - "no_embedding" + type: "string" + description: + "Do not calculate and pass embeddings to Weaviate. Suitable\ + \ for clusters with configured vectorizers to calculate embeddings within\ + \ Weaviate or for classes that should only support regular text search." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + - "mode" + description: + "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Weaviate cluster." 
+ order: 1 + examples: + - "https://my-cluster.weaviate.network" + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Weaviate instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Weaviate\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Weaviate cluster" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Weaviate cluster" + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Weaviate clusters)" + - title: "No Authentication" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + batch_size: + title: "Batch Size" + description: "The number of records to send to Weaviate in each batch" + default: 128 + type: "integer" + text_field: + title: "Text Field" + description: "The field in the object that contains the embedded text" + default: "text" + type: "string" + tenant_id: + title: "Tenant ID" + description: "The tenant ID to use for multi tenancy" + default: "" + airbyte_secret: 
true + type: "string" + x-speakeasy-param-sensitive: true + default_vectorizer: + title: "Default Vectorizer" + description: "The vectorizer to use if new classes need to be created" + default: "none" + enum: + - "none" + - "text2vec-cohere" + - "text2vec-huggingface" + - "text2vec-openai" + - "text2vec-palm" + - "text2vec-contextionary" + - "text2vec-transformers" + - "text2vec-gpt4all" + type: "string" + additional_headers: + title: "Additional headers" + description: "Additional HTTP headers to send with every request." + default: [] + examples: + - header_key: "X-OpenAI-Api-Key" + value: "my-openai-api-key" + type: "array" + items: + title: "Header" + type: "object" + properties: + header_key: + title: "Header Key" + type: "string" + value: + title: "Header Value" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "header_key" + - "value" + required: + - "host" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "weaviate" + const: "weaviate" + enum: + - "weaviate" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-weaviate-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ 
+ \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "No external embedding" + type: "object" + properties: + mode: + title: "Mode" + default: "no_embedding" + const: "no_embedding" + enum: + - "no_embedding" + type: "string" + description: + "Do not calculate and pass embeddings to Weaviate. Suitable\ + \ for clusters with configured vectorizers to calculate embeddings within\ + \ Weaviate or for classes that should only support regular text search." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + - "mode" + description: + "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context of your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Weaviate cluster." 
+ order: 1 + examples: + - "https://my-cluster.weaviate.network" + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Weaviate instance" + airbyte_secret: true + type: "string" + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Weaviate\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Weaviate cluster" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Weaviate cluster" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Weaviate clusters)" + - title: "No Authentication" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + batch_size: + title: "Batch Size" + description: "The number of records to send to Weaviate in each batch" + default: 128 + type: "integer" + text_field: + title: "Text Field" + description: "The field in the object that contains the embedded text" + default: "text" + type: "string" + tenant_id: + title: "Tenant ID" + description: "The tenant ID to use for multi tenancy" + default: "" + airbyte_secret: true + type: "string" + default_vectorizer: + title: "Default Vectorizer" 
+ description: "The vectorizer to use if new classes need to be created" + default: "none" + enum: + - "none" + - "text2vec-cohere" + - "text2vec-huggingface" + - "text2vec-openai" + - "text2vec-palm" + - "text2vec-contextionary" + - "text2vec-transformers" + - "text2vec-gpt4all" + type: "string" + additional_headers: + title: "Additional headers" + description: "Additional HTTP headers to send with every request." + default: [] + examples: + - header_key: "X-OpenAI-Api-Key" + value: "my-openai-api-key" + type: "array" + items: + title: "Header" + type: "object" + properties: + header_key: + title: "Header Key" + type: "string" + value: + title: "Header Value" + airbyte_secret: true + type: "string" + required: + - "header_key" + - "value" + required: + - "host" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-mongodb: + title: "MongoDB Destination Spec" + type: "object" + required: + - "database" + - "auth_type" + - "destinationType" + properties: + instance_type: + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + \ Set TLS connection is used by default." + title: "MongoDb Instance Type" + type: "object" + order: 0 + oneOf: + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "The Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "The Port of a Mongo database to be replicated." 
+ minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS Connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member separated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + auth_type: + title: "Authorization type" + type: "object" + description: "Authorization type." + oneOf: + - title: "None" + description: "None." + required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + enum: + - "none" + - title: "Login/Password" + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + enum: + - "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mongodb" + const: "mongodb" + enum: + - "mongodb" + order: 0 + type: "string" + destination-mongodb-update: + title: "MongoDB Destination Spec" + type: "object" + required: + - "database" + - "auth_type" + properties: + instance_type: + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + \ Set TLS connection is used by default." 
+ title: "MongoDb Instance Type" + type: "object" + order: 0 + oneOf: + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "The Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "The Port of a Mongo database to be replicated." + minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS Connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member separated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + auth_type: + title: "Authorization type" + type: "object" + description: "Authorization type." + oneOf: + - title: "None" + description: "None." 
+ required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + enum: + - "none" + - title: "Login/Password" + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + enum: + - "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-vectara: + title: "Vectara Config" + type: "object" + properties: + oauth2: + title: "OAuth2.0 Credentials" + type: "object" + properties: + client_id: + title: "OAuth Client ID" + description: "OAuth2.0 client id" + order: 0 + type: "string" + client_secret: + title: "OAuth Client Secret" + description: "OAuth2.0 client secret" + airbyte_secret: true + order: 1 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + description: + "OAuth2.0 credentials used to authenticate admin actions (creating/deleting\ + \ corpora)" + group: "auth" + customer_id: + title: 
"Customer ID" + description: "Your customer id as it is in the authentication url" + order: 2 + group: "account" + type: "string" + corpus_name: + title: "Corpus Name" + description: "The Name of Corpus to load data into" + order: 3 + group: "account" + type: "string" + parallelize: + title: "Parallelize" + description: "Parallelize indexing into Vectara with multiple threads" + default: false + always_show: true + group: "account" + type: "boolean" + text_fields: + title: "Text fields to index with Vectara" + description: + "List of fields in the record that should be in the section\ + \ of the document. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all fields\ + \ are considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + title_field: + title: "Text field to use as document title with Vectara" + description: + "A field that will be used to populate the `title` of each\ + \ document. The field list is applied to all streams in the same way and\ + \ non-existing fields are ignored. If none are defined, all fields are\ + \ considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." 
+ default: "" + always_show: true + examples: + - "document_key" + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as metadata.\ + \ The field list is applied to all streams in the same way and non-existing\ + \ fields are ignored. If none are defined, all fields are considered metadata\ + \ fields. When specifying text fields, you can access nested fields in\ + \ the record by using dot notation, e.g. `user.name` will access the `name`\ + \ field in the `user` object. It's also possible to use wildcards to access\ + \ all fields in an object, e.g. `users.*.name` will access all `names`\ + \ fields in all entries of the `users` array. When specifying nested paths,\ + \ all matching values are flattened into an array set to a field named\ + \ by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + type: "array" + items: + type: "string" + destinationType: + title: "vectara" + const: "vectara" + enum: + - "vectara" + order: 0 + type: "string" + required: + - "oauth2" + - "customer_id" + - "corpus_name" + - "destinationType" + description: "Configuration to connect to the Vectara instance" + groups: + - id: "account" + title: "Account" + - id: "auth" + title: "Authentication" + destination-vectara-update: + title: "Vectara Config" + type: "object" + properties: + oauth2: + title: "OAuth2.0 Credentials" + type: "object" + properties: + client_id: + title: "OAuth Client ID" + description: "OAuth2.0 client id" + order: 0 + type: "string" + client_secret: + title: "OAuth Client Secret" + description: "OAuth2.0 client secret" + airbyte_secret: true + order: 1 + type: "string" + required: + - "client_id" + - "client_secret" + description: + "OAuth2.0 credentials used to authenticate admin actions (creating/deleting\ + \ corpora)" + group: "auth" + customer_id: + title: 
 "Customer ID" + description: "Your customer id as it is in the authentication url" + 
order: 2 + group: "account" + type: "string" + corpus_name: + title: "Corpus Name" + description: "The Name of Corpus to load data into" + order: 3 + group: "account" + type: "string" + parallelize: + title: "Parallelize" + description: "Parallelize indexing into Vectara with multiple threads" + default: false + always_show: true + group: "account" + type: "boolean" + text_fields: + title: "Text fields to index with Vectara" + description: + "List of fields in the record that should be in the section\ + \ of the document. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all fields\ + \ are considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + title_field: + title: "Text field to use as document title with Vectara" + description: + "A field that will be used to populate the `title` of each\ + \ document. The field list is applied to all streams in the same way and\ + \ non-existing fields are ignored. If none are defined, all fields are\ + \ considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." 
+ default: "" + always_show: true + examples: + - "document_key" + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as metadata.\ + \ The field list is applied to all streams in the same way and non-existing\ + \ fields are ignored. If none are defined, all fields are considered metadata\ + \ fields. When specifying text fields, you can access nested fields in\ + \ the record by using dot notation, e.g. `user.name` will access the `name`\ + \ field in the `user` object. It's also possible to use wildcards to access\ + \ all fields in an object, e.g. `users.*.name` will access all `names`\ + \ fields in all entries of the `users` array. When specifying nested paths,\ + \ all matching values are flattened into an array set to a field named\ + \ by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + type: "array" + items: + type: "string" + required: + - "oauth2" + - "customer_id" + - "corpus_name" + description: "Configuration to connect to the Vectara instance" + groups: + - id: "account" + title: "Account" + - id: "auth" + title: "Authentication" + destination-s3-glue: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + - "destinationType" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 4 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." 
+ oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "Root level flattening" + enum: + - "No flattening" + - "Root level flattening" + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: + "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: + "The library that your query engine will use for reading and\ + \ writing data in your lake." 
+ type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + destinationType: + title: "s3-glue" + const: "s3-glue" + enum: + - "s3-glue" + order: 0 + type: "string" + destination-s3-glue-update: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 4 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "Root level flattening" + enum: + - "No flattening" + - "Root level flattening" + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: + "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: + "The library that your query engine will use for reading and\ + \ writing data in your lake." + type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + destination-dev-null: + title: "E2E Test Destination Spec" + type: "object" + required: + - "test_destination" + - "destinationType" + properties: + test_destination: + title: "Test Destination" + type: "object" + description: "The type of destination to be used" + oneOf: + - title: "Logging" + required: + - "test_destination_type" + - "logging_config" + properties: + test_destination_type: + type: "string" + const: "LOGGING" + default: "LOGGING" + enum: + - "LOGGING" + logging_config: + title: "Logging Configuration" + type: "object" + description: "Configurate how the messages are logged." + oneOf: + - title: "First N Entries" + description: "Log first N entries per stream." 
+ type: "object" + required: + - "logging_type" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "FirstN" + default: "FirstN" + max_entry_count: + title: "N" + description: + "Number of entries to log. This destination is\ + \ for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Every N-th Entry" + description: + "For each stream, log every N-th entry with a maximum\ + \ cap." + type: "object" + required: + - "logging_type" + - "nth_entry_to_log" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "EveryNth" + default: "EveryNth" + nth_entry_to_log: + title: "N" + description: + "The N-th entry to log for each stream. N starts\ + \ from 1. For example, when N = 1, every entry is logged;\ + \ when N = 2, every other entry is logged; when N = 3, one\ + \ out of three entries is logged." + type: "number" + example: + - 3 + minimum: 1 + maximum: 1000 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Random Sampling" + description: + "For each stream, randomly log a percentage of the\ + \ entries with a maximum cap." + type: "object" + required: + - "logging_type" + - "sampling_ratio" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "RandomSampling" + default: "RandomSampling" + sampling_ratio: + title: "Sampling Ratio" + description: "A positive floating number smaller than 1." 
+ type: "number" + default: 0.001 + examples: + - 0.001 + minimum: 0 + maximum: 1 + seed: + title: "Random Number Generator Seed" + description: + "When the seed is unspecified, the current time\ + \ millis will be used as the seed." + type: "number" + examples: + - 1900 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Silent" + required: + - "test_destination_type" + properties: + test_destination_type: + type: "string" + const: "SILENT" + default: "SILENT" + enum: + - "SILENT" + - title: "Throttled" + required: + - "test_destination_type" + - "millis_per_record" + properties: + test_destination_type: + type: "string" + const: "THROTTLED" + default: "THROTTLED" + enum: + - "THROTTLED" + millis_per_record: + description: "Number of milli-second to pause in between records." + type: "integer" + - title: "Failing" + required: + - "test_destination_type" + - "num_messages" + properties: + test_destination_type: + type: "string" + const: "FAILING" + default: "FAILING" + enum: + - "FAILING" + num_messages: + description: "Number of messages after which to fail." 
+ type: "integer" + destinationType: + title: "dev-null" + const: "dev-null" + enum: + - "dev-null" + order: 0 + type: "string" + destination-dev-null-update: + title: "E2E Test Destination Spec" + type: "object" + required: + - "test_destination" + properties: + test_destination: + title: "Test Destination" + type: "object" + description: "The type of destination to be used" + oneOf: + - title: "Logging" + required: + - "test_destination_type" + - "logging_config" + properties: + test_destination_type: + type: "string" + const: "LOGGING" + default: "LOGGING" + enum: + - "LOGGING" + logging_config: + title: "Logging Configuration" + type: "object" + description: "Configurate how the messages are logged." + oneOf: + - title: "First N Entries" + description: "Log first N entries per stream." + type: "object" + required: + - "logging_type" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "FirstN" + default: "FirstN" + max_entry_count: + title: "N" + description: + "Number of entries to log. This destination is\ + \ for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Every N-th Entry" + description: + "For each stream, log every N-th entry with a maximum\ + \ cap." + type: "object" + required: + - "logging_type" + - "nth_entry_to_log" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "EveryNth" + default: "EveryNth" + nth_entry_to_log: + title: "N" + description: + "The N-th entry to log for each stream. N starts\ + \ from 1. For example, when N = 1, every entry is logged;\ + \ when N = 2, every other entry is logged; when N = 3, one\ + \ out of three entries is logged." + type: "number" + example: + - 3 + minimum: 1 + maximum: 1000 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. 
So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Random Sampling" + description: + "For each stream, randomly log a percentage of the\ + \ entries with a maximum cap." + type: "object" + required: + - "logging_type" + - "sampling_ratio" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "RandomSampling" + default: "RandomSampling" + sampling_ratio: + title: "Sampling Ratio" + description: "A positive floating number smaller than 1." + type: "number" + default: 0.001 + examples: + - 0.001 + minimum: 0 + maximum: 1 + seed: + title: "Random Number Generator Seed" + description: + "When the seed is unspecified, the current time\ + \ millis will be used as the seed." + type: "number" + examples: + - 1900 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Silent" + required: + - "test_destination_type" + properties: + test_destination_type: + type: "string" + const: "SILENT" + default: "SILENT" + enum: + - "SILENT" + - title: "Throttled" + required: + - "test_destination_type" + - "millis_per_record" + properties: + test_destination_type: + type: "string" + const: "THROTTLED" + default: "THROTTLED" + enum: + - "THROTTLED" + millis_per_record: + description: "Number of milli-second to pause in between records." + type: "integer" + - title: "Failing" + required: + - "test_destination_type" + - "num_messages" + properties: + test_destination_type: + type: "string" + const: "FAILING" + default: "FAILING" + enum: + - "FAILING" + num_messages: + description: "Number of messages after which to fail." 
+ type: "integer" + destination-snowflake-cortex: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Snowflake Connection" + type: "object" + properties: + host: + title: "Host" + description: + "Enter the account name you want to use to access the database.\ + \ This is usually the identifier before .snowflakecomputing.com" + order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + role: + title: "Role" + description: "Enter the role that you want to use to access Snowflake" + order: 2 + examples: + - "AIRBYTE_ROLE" + - "ACCOUNTADMIN" + type: "string" + warehouse: + title: "Warehouse" + description: + "Enter the name of the warehouse that you want to use as\ + \ a compute cluster" + order: 3 + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the 
name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "default_schema" + - "username" + - "credentials" + description: "Snowflake can be used to store vector data and retrieve embeddings." + group: "indexing" + destinationType: + title: "snowflake-cortex" + const: "snowflake-cortex" + enum: + - "snowflake-cortex" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-snowflake-cortex-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. 
You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. 
The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." 
+ type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." + default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." 
+ required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." 
+ default: false + group: "advanced" + type: "boolean" + indexing: + title: "Snowflake Connection" + type: "object" + properties: + host: + title: "Host" + description: + "Enter the account name you want to use to access the database.\ + \ This is usually the identifier before .snowflakecomputing.com" + order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + role: + title: "Role" + description: "Enter the role that you want to use to access Snowflake" + order: 2 + examples: + - "AIRBYTE_ROLE" + - "ACCOUNTADMIN" + type: "string" + warehouse: + title: "Warehouse" + description: + "Enter the name of the warehouse that you want to use as\ + \ a compute cluster" + order: 3 + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + required: + - "password" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "default_schema" + - "username" + - "credentials" + description: "Snowflake can be used to store vector data and retrieve embeddings." 
+ group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-timeplus: + title: "Destination Timeplus" + type: "object" + required: + - "endpoint" + - "apikey" + - "destinationType" + properties: + endpoint: + title: "Endpoint" + description: "Timeplus workspace endpoint" + type: "string" + default: "https://us-west-2.timeplus.cloud/" + examples: + - "https://us-west-2.timeplus.cloud/workspace_id" + order: 0 + apikey: + title: "API key" + description: "Personal API key" + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + destinationType: + title: "timeplus" + const: "timeplus" + enum: + - "timeplus" + order: 0 + type: "string" + destination-timeplus-update: + title: "Destination Timeplus" + type: "object" + required: + - "endpoint" + - "apikey" + properties: + endpoint: + title: "Endpoint" + description: "Timeplus workspace endpoint" + type: "string" + default: "https://us-west-2.timeplus.cloud/" + examples: + - "https://us-west-2.timeplus.cloud/workspace_id" + order: 0 + apikey: + title: "API key" + description: "Personal API key" + type: "string" + airbyte_secret: true + order: 1 + destination-convex: + title: "Destination Convex" + type: "object" + required: + - "deployment_url" + - "access_key" + - "destinationType" + properties: + deployment_url: + type: "string" + description: "URL of the Convex deployment that is the destination" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + description: "API access key used to send data to a Convex deployment." 
+ airbyte_secret: "true" + x-speakeasy-param-sensitive: true + destinationType: + title: "convex" + const: "convex" + enum: + - "convex" + order: 0 + type: "string" + destination-convex-update: + title: "Destination Convex" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + description: "URL of the Convex deployment that is the destination" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + description: "API access key used to send data to a Convex deployment." + airbyte_secret: "true" + destination-firestore: + title: "Destination Google Firestore" + type: "object" + required: + - "project_id" + - "destinationType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + destinationType: + title: "firestore" + const: "firestore" + enum: + - "firestore" + order: 0 + type: "string" + destination-firestore-update: + title: "Destination Google Firestore" + type: "object" + required: + - "project_id" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." 
+ title: "Credentials JSON" + airbyte_secret: true + destination-redshift: + title: "Redshift Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" + - "destinationType" + properties: + host: + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)" + type: "string" + title: "Host" + group: "connection" + order: 1 + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + title: "Port" + group: "connection" + order: 2 + username: + description: "Username to use to access the database." + type: "string" + title: "Username" + group: "connection" + order: 3 + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + group: "connection" + order: 4 + x-speakeasy-param-sensitive: true + database: + description: "Name of the database." + type: "string" + title: "Database" + group: "connection" + order: 5 + schema: + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. Unless specifically configured, the usual value\ + \ for this field is \"public\"." + type: "string" + examples: + - "public" + default: "public" + group: "connection" + title: "Default Schema" + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + group: "connection" + order: 7 + uploading_method: + title: "Uploading Method" + type: "object" + description: "The way data will be uploaded to Redshift." 
+ group: "connection" + order: 8 + display_type: "radio" + oneOf: + - title: "AWS S3 Staging" + description: + "(recommended) Uploads data to S3 and then uses a\ + \ COPY to insert the data into Redshift. COPY is recommended for production\ + \ workloads for better speed and scalability. See AWS docs for more details." + required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + enum: + - "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket." + examples: + - "airbyte.staging" + order: 0 + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: + "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 2 + access_key_id: + type: "string" + description: + "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." 
+ title: "S3 Access Key Id" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Secret Access Key" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for\ + \ the S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: + "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)." + title: "Destinations V2 Raw Table Schema" + order: 9 + group: "tables" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + group: "tables" + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! 
Risk of unrecoverable data loss)" + order: 12 + group: "tables" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "redshift" + const: "redshift" + enum: + - "redshift" + order: 0 + type: "string" + groups: + - id: "connection" + title: "Connection" + - id: "tables" + title: "Tables" + destination-redshift-update: + title: "Redshift Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" + properties: + host: + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)" + type: "string" + 
title: "Host" + group: "connection" + order: 1 + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + title: "Port" + group: "connection" + order: 2 + username: + description: "Username to use to access the database." + type: "string" + title: "Username" + group: "connection" + order: 3 + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + group: "connection" + order: 4 + database: + description: "Name of the database." + type: "string" + title: "Database" + group: "connection" + order: 5 + schema: + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. Unless specifically configured, the usual value\ + \ for this field is \"public\"." + type: "string" + examples: + - "public" + default: "public" + group: "connection" + title: "Default Schema" + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + group: "connection" + order: 7 + uploading_method: + title: "Uploading Method" + type: "object" + description: "The way data will be uploaded to Redshift." + group: "connection" + order: 8 + display_type: "radio" + oneOf: + - title: "AWS S3 Staging" + description: + "(recommended) Uploads data to S3 and then uses a\ + \ COPY to insert the data into Redshift. COPY is recommended for production\ + \ workloads for better speed and scalability. See AWS docs for more details." 
+ required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + enum: + - "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket." + examples: + - "airbyte.staging" + order: 0 + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: + "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 2 + access_key_id: + type: "string" + description: + "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Access Key Id" + airbyte_secret: true + order: 3 + secret_access_key: + type: "string" + description: + "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." 
+ title: "S3 Secret Access Key" + airbyte_secret: true + order: 4 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for\ + \ the S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: + "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)." + title: "Destinations V2 Raw Table Schema" + order: 9 + group: "tables" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + group: "tables" + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 12 + group: "tables" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + groups: + - id: "connection" + title: "Connection" + - id: "tables" + title: "Tables" + destination-dynamodb: + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name_prefix" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + - "destinationType" + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name_prefix: + title: "Table name prefix" + type: "string" + description: "The prefix to use when naming DynamoDB tables." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + access_key_id: + type: "string" + description: + "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." + title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + x-speakeasy-param-sensitive: true + destinationType: + title: "dynamodb" + const: "dynamodb" + enum: + - "dynamodb" + order: 0 + type: "string" + destination-dynamodb-update: + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name_prefix" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name_prefix: + title: "Table name prefix" + type: "string" + description: "The prefix to use when naming DynamoDB tables." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + access_key_id: + type: "string" + description: + "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." + title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + destination-qdrant: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. 
You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." 
+ processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. 
When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." + default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." 
+ required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." 
+ default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + url: + title: "Public Endpoint" + description: "Public Endpoint of the Qdrant cluser" + order: 0 + type: "string" + auth_method: + title: "Authentication Method" + description: "Method to authenticate with the Qdrant Instance" + default: "api_key_auth" + type: "object" + order: 1 + oneOf: + - title: "ApiKeyAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "api_key_auth" + const: "api_key_auth" + enum: + - "api_key_auth" + type: "string" + api_key: + title: "API Key" + description: "API Key for the Qdrant instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "api_key" + - title: "NoAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + prefer_grpc: + title: "Prefer gRPC" + description: + "Whether to prefer gRPC over HTTP. Set to true for Qdrant\ + \ cloud clusters" + default: true + type: "boolean" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 2 + type: "string" + distance_metric: + title: "Distance Metric" + description: + "The Distance metric used to measure similarities among\ + \ vectors. This field is only used if the collection defined in the\ + \ does not exist yet and is created automatically by the connector." 
+ default: "cos" + enum: + - "dot" + - "cos" + - "euc" + type: "string" + text_field: + title: "Text Field" + description: "The field in the payload that contains the embedded text" + default: "text" + type: "string" + required: + - "url" + - "collection" + group: "Indexing" + description: "Indexing configuration" + destinationType: + title: "qdrant" + const: "qdrant" + enum: + - "qdrant" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-qdrant-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + url: + title: "Public Endpoint" + description: "Public Endpoint of the Qdrant cluser" + order: 0 + type: "string" + auth_method: + title: "Authentication Method" + description: "Method to authenticate with the Qdrant Instance" + default: "api_key_auth" + type: "object" + order: 1 + oneOf: + - title: "ApiKeyAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "api_key_auth" + const: "api_key_auth" + enum: + - "api_key_auth" + type: "string" + api_key: + title: "API Key" + description: "API Key for the Qdrant instance" + airbyte_secret: true + type: "string" + required: + - "api_key" + - title: "NoAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + prefer_grpc: + title: "Prefer gRPC" + description: + "Whether to prefer gRPC over HTTP. 
Set to true for Qdrant\ + \ cloud clusters" + default: true + type: "boolean" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 2 + type: "string" + distance_metric: + title: "Distance Metric" + description: + "The Distance metric used to measure similarities among\ + \ vectors. This field is only used if the collection defined in the\ + \ does not exist yet and is created automatically by the connector." + default: "cos" + enum: + - "dot" + - "cos" + - "euc" + type: "string" + text_field: + title: "Text Field" + description: "The field in the payload that contains the embedded text" + default: "text" + type: "string" + required: + - "url" + - "collection" + group: "Indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-snowflake: + title: "Snowflake Destination Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + - "destinationType" + properties: + host: + description: + "Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)" + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" + type: "string" + title: "Host" + pattern: + "^(http(s)?:\\/\\/)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\\ + .(snowflakecomputing\\.com|localstack\\.cloud))$" + pattern_descriptor: "{account_name}.snowflakecomputing.com or {accountname}.{aws_location}.aws.snowflakecomputing.com" + order: 0 + role: + description: + "Enter the role that you want to use to access Snowflake" + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 1 + warehouse: + description: + "Enter the name of the warehouse that you want to use as a compute 
cluster" + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 2 + database: + description: + "Enter the name of the database you want to sync data into" + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 3 + schema: + description: + "Enter the name of the default schema" + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Default Schema" + order: 4 + username: + description: "Enter the name of the user you want to use to access the database" + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 5 + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Key Pair Authentication" + type: "object" + order: 0 + required: + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + enum: + - "Key Pair Authentication" + default: "Key Pair Authentication" + order: 0 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Username and Password" + type: "object" + required: + - "password" + order: 1 + properties: + auth_type: + type: "string" + const: "Username and Password" + enum: + - "Username and Password" + default: "Username and Password" + order: 0 + password: + description: "Enter the password associated with the username." 
+ type: "string" + airbyte_secret: true + title: "Password" + order: 1 + x-speakeasy-param-sensitive: true + - title: "OAuth2.0" + type: "object" + order: 2 + required: + - "access_token" + - "refresh_token" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + default: "OAuth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "Enter your application's Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "Enter your application's Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Enter you application's Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Enter your application's Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 6 + jdbc_url_params: + description: + "Enter the additional properties to pass to the JDBC URL string\ + \ when connecting to the database (formatted as key=value pairs separated\ + \ by the symbol &). Example: key1=value1&key2=value2&key3=value3" + title: "JDBC URL Params" + type: "string" + order: 7 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 10 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! 
Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + retention_period_days: + type: "integer" + default: 1 + description: + "The number of days of Snowflake Time Travel to enable on the\ + \ tables. See Snowflake's documentation for more information. Setting a nonzero\ + \ value will incur increased storage costs in your Snowflake instance." + title: "Data Retention Period (days)" + order: 13 + use_merge_for_upsert: + type: "boolean" + default: false + description: + "Use MERGE for de-duplication of final tables. This option\ + \ no effect if Final tables are disabled or Sync mode is not DEDUPE" + title: "Use MERGE for De-duplication of final tables" + order: 14 + destinationType: + title: "snowflake" + const: "snowflake" + enum: + - "snowflake" + order: 0 + type: "string" + destination-snowflake-update: + title: "Snowflake Destination Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + properties: + host: + description: + "Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)" + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" + type: "string" + title: "Host" + pattern: + "^(http(s)?:\\/\\/)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\\ + .(snowflakecomputing\\.com|localstack\\.cloud))$" + pattern_descriptor: "{account_name}.snowflakecomputing.com or {accountname}.{aws_location}.aws.snowflakecomputing.com" + order: 0 + role: + description: + "Enter the role that you want to use to access Snowflake" + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 1 + warehouse: + description: + "Enter the name of the warehouse that you want to use as a compute cluster" + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 2 + database: + description: + "Enter the name of the database you want to sync data into" + examples: + - 
"AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 3 + schema: + description: + "Enter the name of the default schema" + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Default Schema" + order: 4 + username: + description: "Enter the name of the user you want to use to access the database" + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 5 + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Key Pair Authentication" + type: "object" + order: 0 + required: + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + enum: + - "Key Pair Authentication" + default: "Key Pair Authentication" + order: 0 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + - title: "Username and Password" + type: "object" + required: + - "password" + order: 1 + properties: + auth_type: + type: "string" + const: "Username and Password" + enum: + - "Username and Password" + default: "Username and Password" + order: 0 + password: + description: "Enter the password associated with the username." 
+ type: "string" + airbyte_secret: true + title: "Password" + order: 1 + - title: "OAuth2.0" + type: "object" + order: 2 + required: + - "access_token" + - "refresh_token" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + default: "OAuth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "Enter your application's Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Enter your application's Client secret" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Enter you application's Access Token" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Enter your application's Refresh Token" + airbyte_secret: true + order: 6 + jdbc_url_params: + description: + "Enter the additional properties to pass to the JDBC URL string\ + \ when connecting to the database (formatted as key=value pairs separated\ + \ by the symbol &). Example: key1=value1&key2=value2&key3=value3" + title: "JDBC URL Params" + type: "string" + order: 7 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 10 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + retention_period_days: + type: "integer" + default: 1 + description: + "The number of days of Snowflake Time Travel to enable on the\ + \ tables. See Snowflake's documentation for more information. 
Setting a nonzero\ + \ value will incur increased storage costs in your Snowflake instance." + title: "Data Retention Period (days)" + order: 13 + use_merge_for_upsert: + type: "boolean" + default: false + description: + "Use MERGE for de-duplication of final tables. This option\ + \ no effect if Final tables are disabled or Sync mode is not DEDUPE" + title: "Use MERGE for De-duplication of final tables" + order: 14 + destination-databricks: + title: "Databricks Lakehouse Destination Spec" + type: "object" + required: + - "accept_terms" + - "hostname" + - "http_path" + - "database" + - "authentication" + - "destinationType" + properties: + accept_terms: + title: "Agree to the Databricks JDBC Driver Terms & Conditions" + type: "boolean" + description: + "You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector." + default: false + order: 1 + hostname: + title: "Server Hostname" + type: "string" + description: "Databricks Cluster Server Hostname." + examples: + - "abc-12345678-wxyz.cloud.databricks.com" + order: 2 + http_path: + title: "HTTP Path" + type: "string" + description: "Databricks Cluster HTTP Path." + examples: + - "sql/1.0/warehouses/0000-1111111-abcd90" + order: 3 + port: + title: "Port" + type: "string" + description: "Databricks Cluster Port." + default: "443" + examples: + - "443" + order: 4 + database: + title: "Databricks Unity Catalog Name" + description: "The name of the unity catalog for the database" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema tables are written. If not specified otherwise,\ + \ the \"default\" will be used." 
+ type: "string" + examples: + - "default" + default: "default" + order: 6 + authentication: + title: "Authentication" + type: "object" + description: "Authentication mechanism for Staging files and running queries" + default: "OAUTH" + order: 8 + oneOf: + - title: "OAuth2 (Recommended)" + required: + - "auth_type" + - "client_id" + - "secret" + properties: + auth_type: + type: "string" + const: "OAUTH" + order: 0 + enum: + - "OAUTH" + client_id: + type: "string" + order: 1 + secret: + type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + - title: "Personal Access Token" + required: + - "auth_type" + - "personal_access_token" + properties: + auth_type: + type: "string" + const: "BASIC" + order: 0 + enum: + - "BASIC" + personal_access_token: + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Default to 'true'. Switch it to 'false' for debugging purpose." + default: true + order: 9 + raw_schema_override: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + default: "airbyte_internal" + order: 10 + destinationType: + title: "databricks" + const: "databricks" + enum: + - "databricks" + order: 0 + type: "string" + destination-databricks-update: + title: "Databricks Lakehouse Destination Spec" + type: "object" + required: + - "accept_terms" + - "hostname" + - "http_path" + - "database" + - "authentication" + properties: + accept_terms: + title: "Agree to the Databricks JDBC Driver Terms & Conditions" + type: "boolean" + description: + "You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector." + default: false + order: 1 + hostname: + title: "Server Hostname" + type: "string" + description: "Databricks Cluster Server Hostname." 
+ examples: + - "abc-12345678-wxyz.cloud.databricks.com" + order: 2 + http_path: + title: "HTTP Path" + type: "string" + description: "Databricks Cluster HTTP Path." + examples: + - "sql/1.0/warehouses/0000-1111111-abcd90" + order: 3 + port: + title: "Port" + type: "string" + description: "Databricks Cluster Port." + default: "443" + examples: + - "443" + order: 4 + database: + title: "Databricks Unity Catalog Name" + description: "The name of the unity catalog for the database" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema tables are written. If not specified otherwise,\ + \ the \"default\" will be used." + type: "string" + examples: + - "default" + default: "default" + order: 6 + authentication: + title: "Authentication" + type: "object" + description: "Authentication mechanism for Staging files and running queries" + default: "OAUTH" + order: 8 + oneOf: + - title: "OAuth2 (Recommended)" + required: + - "auth_type" + - "client_id" + - "secret" + properties: + auth_type: + type: "string" + const: "OAUTH" + order: 0 + enum: + - "OAUTH" + client_id: + type: "string" + order: 1 + secret: + type: "string" + airbyte_secret: true + order: 2 + - title: "Personal Access Token" + required: + - "auth_type" + - "personal_access_token" + properties: + auth_type: + type: "string" + const: "BASIC" + order: 0 + enum: + - "BASIC" + personal_access_token: + type: "string" + airbyte_secret: true + order: 1 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Default to 'true'. Switch it to 'false' for debugging purpose." 
+ default: true + order: 9 + raw_schema_override: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + default: "airbyte_internal" + order: 10 + destination-oracle: + title: "Oracle Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sid" + - "destinationType" + properties: + host: + title: "Host" + description: "The hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + examples: + - "1521" + order: 1 + sid: + title: "SID" + description: + "The System Identifier uniquely distinguishes the instance\ + \ from any other instance on the same computer." + type: "string" + order: 2 + username: + title: "User" + description: + "The username to access the database. This user must have CREATE\ + \ USER privileges in the database." + type: "string" + order: 3 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema is used as the target schema for all statements\ + \ issued from the connection that do not explicitly specify a schema name.\ + \ The usual value for this field is \"airbyte\". In Oracle, schemas and\ + \ users are the same thing, so the \"user\" parameter is used as the login\ + \ credentials and this is used for the default Airbyte message schema." 
+ type: "string" + examples: + - "airbyte" + default: "airbyte" + order: 6 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the database encryption algorithm." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." 
+ type: "string" + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "oracle" + const: "oracle" + enum: + - "oracle" + order: 0 + type: "string" + destination-oracle-update: + title: "Oracle Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sid" + properties: + host: + title: "Host" + description: "The hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + examples: + - "1521" + order: 1 + sid: + title: "SID" + description: + "The System Identifier uniquely distinguishes the instance\ + \ from any other instance on the same computer." + type: "string" + order: 2 + username: + title: "User" + description: + "The username to access the database. This user must have CREATE\ + \ USER privileges in the database." + type: "string" + order: 3 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema is used as the target schema for all statements\ + \ issued from the connection that do not explicitly specify a schema name.\ + \ The usual value for this field is \"airbyte\". In Oracle, schemas and\ + \ users are the same thing, so the \"user\" parameter is used as the login\ + \ credentials and this is used for the default Airbyte message schema." + type: "string" + examples: + - "airbyte" + default: "airbyte" + order: 6 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the database encryption algorithm." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-aws-datalake: + title: "AWS Datalake Destination Spec" + type: "object" + required: + - "credentials" + - "region" + - "bucket_name" + - "lakeformation_database_name" + - "destinationType" + properties: + aws_account_id: + type: "string" + title: "AWS Account Id" + description: "target aws account id" + examples: + - "111111111111" + order: 1 + credentials: + title: "Authentication mode" + description: "Choose How to Authenticate to AWS." 
+ type: "object" + oneOf: + - type: "object" + title: "IAM Role" + required: + - "role_arn" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM Role" + enum: + - "IAM Role" + default: "IAM Role" + order: 0 + role_arn: + title: "Target Role Arn" + type: "string" + description: "Will assume this role to write data to s3" + airbyte_secret: false + x-speakeasy-param-sensitive: true + - type: "object" + title: "IAM User" + required: + - "credentials_title" + - "aws_access_key_id" + - "aws_secret_access_key" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM User" + enum: + - "IAM User" + default: "IAM User" + order: 0 + aws_access_key_id: + title: "Access Key Id" + type: "string" + description: "AWS User Access Key Id" + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_secret_access_key: + title: "Secret Access Key" + type: "string" + description: "Secret Access Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 2 + region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + order: 4 + bucket_prefix: + title: "Target S3 Bucket Prefix" + type: "string" + description: "S3 prefix" + order: 5 + lakeformation_database_name: + title: "Lake Formation Database Name" + type: "string" + description: + "The default database this destination will use to create tables\ + \ in per stream. Can be changed per connection by customizing the namespace." + order: 6 + lakeformation_database_default_tag_key: + title: "Lake Formation Database Tag Key" + description: "Add a default tag key to databases created by this destination" + examples: + - "pii_level" + type: "string" + order: 7 + lakeformation_database_default_tag_values: + title: "Lake Formation Database Tag Values" + description: + "Add default values for the `Tag Key` to databases created\ + \ by this destination. Comma separate for multiple values." + examples: + - "private,public" + type: "string" + order: 8 + lakeformation_governed_tables: + title: "Lake Formation Governed Tables" + description: "Whether to create tables as LF governed tables." + type: "boolean" + default: false + order: 9 + format: + title: "Output Format *" + type: "object" + description: "Format of the data output." 
+ oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "GZIP" + default: "UNCOMPRESSED" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "ZSTD" + default: "SNAPPY" + order: 10 + partitioning: + title: "Choose how to partition data" + description: "Partition data by cursor fields when a cursor field is a date" + type: "string" + enum: + - "NO PARTITIONING" + - "DATE" + - "YEAR" + - "MONTH" + - "DAY" + - "YEAR/MONTH" + - "YEAR/MONTH/DAY" + default: "NO PARTITIONING" + order: 11 + glue_catalog_float_as_decimal: + title: "Glue Catalog: Float as Decimal" + description: + "Cast float/double as decimal(38,18). This can help achieve\ + \ higher accuracy and represent numbers correctly as received from the\ + \ source." + type: "boolean" + default: false + order: 12 + destinationType: + title: "aws-datalake" + const: "aws-datalake" + enum: + - "aws-datalake" + order: 0 + type: "string" + destination-aws-datalake-update: + title: "AWS Datalake Destination Spec" + type: "object" + required: + - "credentials" + - "region" + - "bucket_name" + - "lakeformation_database_name" + properties: + aws_account_id: + type: "string" + title: "AWS Account Id" + description: "target aws account id" + examples: + - "111111111111" + order: 1 + credentials: + title: "Authentication mode" + description: "Choose How to Authenticate to AWS." 
+ type: "object" + oneOf: + - type: "object" + title: "IAM Role" + required: + - "role_arn" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM Role" + enum: + - "IAM Role" + default: "IAM Role" + order: 0 + role_arn: + title: "Target Role Arn" + type: "string" + description: "Will assume this role to write data to s3" + airbyte_secret: false + - type: "object" + title: "IAM User" + required: + - "credentials_title" + - "aws_access_key_id" + - "aws_secret_access_key" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM User" + enum: + - "IAM User" + default: "IAM User" + order: 0 + aws_access_key_id: + title: "Access Key Id" + type: "string" + description: "AWS User Access Key Id" + airbyte_secret: true + aws_secret_access_key: + title: "Secret Access Key" + type: "string" + description: "Secret Access Key" + airbyte_secret: true + order: 2 + region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." 
+ order: 4 + bucket_prefix: + title: "Target S3 Bucket Prefix" + type: "string" + description: "S3 prefix" + order: 5 + lakeformation_database_name: + title: "Lake Formation Database Name" + type: "string" + description: + "The default database this destination will use to create tables\ + \ in per stream. Can be changed per connection by customizing the namespace." + order: 6 + lakeformation_database_default_tag_key: + title: "Lake Formation Database Tag Key" + description: "Add a default tag key to databases created by this destination" + examples: + - "pii_level" + type: "string" + order: 7 + lakeformation_database_default_tag_values: + title: "Lake Formation Database Tag Values" + description: + "Add default values for the `Tag Key` to databases created\ + \ by this destination. Comma separate for multiple values." + examples: + - "private,public" + type: "string" + order: 8 + lakeformation_governed_tables: + title: "Lake Formation Governed Tables" + description: "Whether to create tables as LF governed tables." + type: "boolean" + default: false + order: 9 + format: + title: "Output Format *" + type: "object" + description: "Format of the data output." + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "GZIP" + default: "UNCOMPRESSED" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "ZSTD" + default: "SNAPPY" + order: 10 + partitioning: + title: "Choose how to partition data" + description: "Partition data by cursor fields when a cursor field is a date" + type: "string" + enum: + - "NO PARTITIONING" + - "DATE" + - "YEAR" + - "MONTH" + - "DAY" + - "YEAR/MONTH" + - "YEAR/MONTH/DAY" + default: "NO PARTITIONING" + order: 11 + glue_catalog_float_as_decimal: + title: "Glue Catalog: Float as Decimal" + description: + "Cast float/double as decimal(38,18). This can help achieve\ + \ higher accuracy and represent numbers correctly as received from the\ + \ source." + type: "boolean" + default: false + order: 12 + destination-milvus: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context of your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: 
"vector" + type: "string" + text_field: + title: "Text Field" + description: "The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "milvus" + const: "milvus" + enum: + - "milvus" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-milvus-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: "vector" + type: "string" + text_field: + title: "Text Field" + description: 
"The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-firebolt: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + - "destinationType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + order: 0 + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." 
+ loading_method: + type: "object" + title: "Loading Method" + description: + "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + enum: + - "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + enum: + - "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." + examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + x-speakeasy-param-sensitive: true + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + x-speakeasy-param-sensitive: true + destinationType: + title: "firebolt" + const: "firebolt" + enum: + - "firebolt" + order: 0 + type: "string" + destination-firebolt-update: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + order: 0 + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + order: 1 + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." 
+ examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + loading_method: + type: "object" + title: "Loading Method" + description: + "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + enum: + - "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + enum: + - "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." + examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + destination-google-sheets: + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + - "destinationType" + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "The link to your spreadsheet. See this\ + \ guide for more details." 
+ examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "Authentication via Google (OAuth)" + description: + "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + destinationType: + title: "google-sheets" + const: "google-sheets" + enum: + - "google-sheets" + order: 0 + type: "string" + destination-google-sheets-update: + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "The link to your spreadsheet. See this\ + \ guide for more details." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "Authentication via Google (OAuth)" + description: + "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." 
+ airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + destination-astra: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + astra_db_app_token: + title: "Astra DB Application Token" + description: + "The application token authorizes a user to connect to\ + \ a specific Astra DB database. It is created when the user clicks\ + \ the Generate Token button on the Overview tab of the Database page\ + \ in the Astra UI." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + astra_db_endpoint: + title: "Astra DB Endpoint" + description: + "The endpoint specifies which Astra DB database queries\ + \ are sent to. It can be copied from the Database Details section\ + \ of the Overview tab of the Database page in the Astra UI." 
+ pattern: + "^https:\\/\\/([a-z]|[0-9]){8}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){12}-[^\\\ + .]*?\\.apps\\.astra\\.datastax\\.com" + examples: + - "https://8292d414-dd1b-4c33-8431-e838bedc04f7-us-east1.apps.astra.datastax.com" + type: "string" + astra_db_keyspace: + title: "Astra DB Keyspace" + description: + "Keyspaces (or Namespaces) serve as containers for organizing\ + \ data within a database. You can create a new keyspace uisng the\ + \ Data Explorer tab in the Astra UI. The keyspace default_keyspace\ + \ is created for you when you create a Vector Database in Astra DB." + type: "string" + collection: + title: "Astra DB collection" + description: + "Collections hold data. They are analagous to tables in\ + \ traditional Cassandra terminology. This tool will create the collection\ + \ with the provided name automatically if it does not already exist.\ + \ Alternatively, you can create one thorugh the Data Explorer tab\ + \ in the Astra UI." + type: "string" + required: + - "astra_db_app_token" + - "astra_db_endpoint" + - "astra_db_keyspace" + - "collection" + description: + "Astra DB gives developers the APIs, real-time data and ecosystem\ + \ integrations to put accurate RAG and Gen AI apps with fewer hallucinations\ + \ in production." 
+ group: "indexing" + destinationType: + title: "astra" + const: "astra" + enum: + - "astra" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-astra-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + astra_db_app_token: + title: "Astra DB Application Token" + description: + "The application token authorizes a user to connect to\ + \ a specific Astra DB database. It is created when the user clicks\ + \ the Generate Token button on the Overview tab of the Database page\ + \ in the Astra UI." + airbyte_secret: true + type: "string" + astra_db_endpoint: + title: "Astra DB Endpoint" + description: + "The endpoint specifies which Astra DB database queries\ + \ are sent to. It can be copied from the Database Details section\ + \ of the Overview tab of the Database page in the Astra UI." 
+ pattern: + "^https:\\/\\/([a-z]|[0-9]){8}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){12}-[^\\\ + .]*?\\.apps\\.astra\\.datastax\\.com" + examples: + - "https://8292d414-dd1b-4c33-8431-e838bedc04f7-us-east1.apps.astra.datastax.com" + type: "string" + astra_db_keyspace: + title: "Astra DB Keyspace" + description: + "Keyspaces (or Namespaces) serve as containers for organizing\ + \ data within a database. You can create a new keyspace uisng the\ + \ Data Explorer tab in the Astra UI. The keyspace default_keyspace\ + \ is created for you when you create a Vector Database in Astra DB." + type: "string" + collection: + title: "Astra DB collection" + description: + "Collections hold data. They are analagous to tables in\ + \ traditional Cassandra terminology. This tool will create the collection\ + \ with the provided name automatically if it does not already exist.\ + \ Alternatively, you can create one thorugh the Data Explorer tab\ + \ in the Astra UI." + type: "string" + required: + - "astra_db_app_token" + - "astra_db_endpoint" + - "astra_db_keyspace" + - "collection" + description: + "Astra DB gives developers the APIs, real-time data and ecosystem\ + \ integrations to put accurate RAG and Gen AI apps with fewer hallucinations\ + \ in production." + group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-teradata: + title: "Teradata Destination Spec" + type: "object" + required: + - "host" + - "username" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "airbyte_td" + default: "airbyte_td" + order: 3 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 5 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the destination database\n prefer - Chose this\ + \ mode to allow unencrypted connection only if the destination database\ + \ does not support encryption\n require - Chose this mode to always\ + \ require encryption. If the destination database server does not support\ + \ encryption, connection will fail\n verify-ca - Chose this mode\ + \ to always require encryption and to verify that the destination database\ + \ server has a valid SSL certificate\n verify-full - This is the\ + \ most secure mode. Chose this mode to always require encryption and to\ + \ verify the identity of the destination database server\n See more information\ + \ - in the docs." + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-ca.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-full.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 7 + destinationType: + title: "teradata" + const: "teradata" + enum: + - "teradata" + order: 0 + type: "string" + destination-teradata-update: + title: "Teradata Destination Spec" + type: "object" + required: + - "host" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "airbyte_td" + default: "airbyte_td" + order: 3 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 5 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the destination database\n prefer - Chose this\ + \ mode to allow unencrypted connection only if the destination database\ + \ does not support encryption\n require - Chose this mode to always\ + \ require encryption. If the destination database server does not support\ + \ encryption, connection will fail\n verify-ca - Chose this mode\ + \ to always require encryption and to verify that the destination database\ + \ server has a valid SSL certificate\n verify-full - This is the\ + \ most secure mode. 
Chose this mode to always require encryption and to\ + \ verify the identity of the destination database server\n See more information\ + \ - in the docs." + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-ca.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-full.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 7 + destination-pinecone: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. 
This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + description: + "The Pinecone API key to use matching the environment (copy\ + \ from Pinecone console)" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + pinecone_environment: + title: "Pinecone Environment" + description: "Pinecone Cloud environment to use" + examples: + - "us-west1-gcp" + - "gcp-starter" + type: "string" + index: + title: "Index" + description: "Pinecone index in your project to load data into" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: + "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + destinationType: + title: "pinecone" + const: "pinecone" + enum: + - "pinecone" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-pinecone-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + description: + "The Pinecone API key to use matching the environment (copy\ + \ from Pinecone console)" + airbyte_secret: true + type: "string" + pinecone_environment: + title: "Pinecone Environment" + description: "Pinecone Cloud environment to use" + examples: + - "us-west1-gcp" + - "gcp-starter" + type: "string" + index: + title: "Index" + description: "Pinecone index in your project to load data into" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: + "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-duckdb: + title: "Destination Duckdb" + type: "object" + required: + - "destination_path" + - "destinationType" + properties: + motherduck_api_key: + title: "MotherDuck API Key" + type: "string" + description: "API key to use for authentication to a MotherDuck database." + airbyte_secret: true + x-speakeasy-param-sensitive: true + destination_path: + title: "Destination DB" + type: "string" + description: + "Path to the .duckdb file, or the text 'md:' to connect to\ + \ MotherDuck. The file will be placed inside that local mount. For more\ + \ information check out our docs" + examples: + - "/local/destination.duckdb" + - "md:" + - "motherduck:" + schema: + title: "Destination Schema" + type: "string" + description: "Database schema name, default for duckdb is 'main'." + example: "main" + destinationType: + title: "duckdb" + const: "duckdb" + enum: + - "duckdb" + order: 0 + type: "string" + destination-duckdb-update: + title: "Destination Duckdb" + type: "object" + required: + - "destination_path" + properties: + motherduck_api_key: + title: "MotherDuck API Key" + type: "string" + description: "API key to use for authentication to a MotherDuck database." + airbyte_secret: true + destination_path: + title: "Destination DB" + type: "string" + description: + "Path to the .duckdb file, or the text 'md:' to connect to\ + \ MotherDuck. The file will be placed inside that local mount. For more\ + \ information check out our docs" + examples: + - "/local/destination.duckdb" + - "md:" + - "motherduck:" + schema: + title: "Destination Schema" + type: "string" + description: "Database schema name, default for duckdb is 'main'." 
+ example: "main" + destination-iceberg: + title: "Iceberg Destination Spec" + type: "object" + required: + - "catalog_config" + - "storage_config" + - "format_config" + - "destinationType" + properties: + catalog_config: + title: "Iceberg catalog config" + type: "object" + description: "Catalog config of Iceberg." + oneOf: + - title: "HiveCatalog: Use Apache Hive MetaStore" + required: + - "catalog_type" + - "hive_thrift_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hive" + enum: + - "Hive" + order: 0 + hive_thrift_uri: + title: "Hive Metastore thrift uri" + type: "string" + description: "Hive MetaStore thrift server uri of iceberg catalog." + examples: + - "host:port" + order: 1 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 2 + - title: + "HadoopCatalog: Use hierarchical file systems as same as storage\ + \ config" + description: + "A Hadoop catalog doesn’t need to connect to a Hive MetaStore,\ + \ but can only be used with HDFS or similar file systems that support\ + \ atomic rename." + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hadoop" + enum: + - "Hadoop" + order: 0 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 1 + - title: "JdbcCatalog: Use relational database" + description: + "Using a table in a relational database to manage Iceberg\ + \ tables through JDBC. Read more here. 
Supporting: PostgreSQL" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Jdbc" + enum: + - "Jdbc" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + jdbc_url: + title: "Jdbc url" + type: "string" + examples: + - "jdbc:postgresql://{host}:{port}/{database}" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please\ + \ select one of the connection modes." + type: "boolean" + default: false + order: 5 + catalog_schema: + title: "schema for Iceberg catalog" + description: + "Iceberg catalog metadata tables are written to catalog\ + \ schema. The usual value for this field is \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 6 + - title: "RESTCatalog" + description: + "The RESTCatalog connects to a REST server at the specified\ + \ URI" + required: + - "catalog_type" + - "rest_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Rest" + enum: + - "Rest" + order: 0 + rest_uri: + title: "REST Server URI" + type: "string" + examples: + - "http://localhost:12345" + order: 1 + rest_credential: + title: + "A credential to exchange for a token in the OAuth2 client\ + \ credentials flow." 
+ type: "string" + airbyte_secret: true + examples: + - "username:password" + order: 2 + x-speakeasy-param-sensitive: true + rest_token: + title: + "A Bearer token which will be used for interaction with the\ + \ server." + type: "string" + airbyte_secret: true + examples: + - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + order: 3 + x-speakeasy-param-sensitive: true + - title: "GlueCatalog" + description: "The GlueCatalog connects to a AWS Glue Catalog" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Glue" + enum: + - "Glue" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + order: 0 + storage_config: + title: "Storage config" + type: "object" + description: "Storage config of Iceberg." + oneOf: + - title: "S3" + type: "object" + description: "S3 object storage" + required: + - "storage_type" + - "access_key_id" + - "secret_access_key" + - "s3_warehouse_uri" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "S3" + enum: + - "S3" + order: 0 + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read\ + \ more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + s3_warehouse_uri: + title: "S3 Warehouse Uri for Iceberg" + type: "string" + description: "The Warehouse Uri for Iceberg" + examples: + - "s3a://my-bucket/path/to/warehouse" + - "s3://my-bucket/path/to/warehouse" + order: 2 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + - "localhost:9000" + order: 4 + s3_path_style_access: + type: "boolean" + description: "Use path style access" + examples: + - true + - false + default: true + order: 5 + - title: "Server-managed" + type: "object" + description: "Server-managed object storage" + required: + - "storage_type" + - "managed_warehouse_name" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "MANAGED" + enum: + - "MANAGED" + order: 0 + managed_warehouse_name: + type: "string" + description: "The name of the managed warehouse" + title: "Warehouse name" + order: 0 + order: 1 + format_config: + title: "File format" + type: "object" + required: + - "format" + description: "File format of Iceberg storage." + properties: + format: + title: "File storage format" + type: "string" + default: "Parquet" + description: "" + enum: + - "Parquet" + - "Avro" + order: 0 + flush_batch_size: + title: "Data file flushing batch size" + description: + "Iceberg data file flush batch size. Incoming rows write\ + \ to cache firstly; When cache size reaches this 'batch size', flush\ + \ into real Iceberg data file." + type: "integer" + default: 10000 + order: 1 + auto_compact: + title: "Auto compact data files" + description: "Auto compact data files when stream close" + type: "boolean" + default: false + order: 2 + compact_target_file_size_in_mb: + title: "Target size of compacted data file" + description: + "Specify the target size of Iceberg data file when performing\ + \ a compaction action. 
" + type: "integer" + default: 100 + order: 3 + order: 2 + destinationType: + title: "iceberg" + const: "iceberg" + enum: + - "iceberg" + order: 0 + type: "string" + destination-iceberg-update: + title: "Iceberg Destination Spec" + type: "object" + required: + - "catalog_config" + - "storage_config" + - "format_config" + properties: + catalog_config: + title: "Iceberg catalog config" + type: "object" + description: "Catalog config of Iceberg." + oneOf: + - title: "HiveCatalog: Use Apache Hive MetaStore" + required: + - "catalog_type" + - "hive_thrift_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hive" + enum: + - "Hive" + order: 0 + hive_thrift_uri: + title: "Hive Metastore thrift uri" + type: "string" + description: "Hive MetaStore thrift server uri of iceberg catalog." + examples: + - "host:port" + order: 1 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 2 + - title: + "HadoopCatalog: Use hierarchical file systems as same as storage\ + \ config" + description: + "A Hadoop catalog doesn’t need to connect to a Hive MetaStore,\ + \ but can only be used with HDFS or similar file systems that support\ + \ atomic rename." + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hadoop" + enum: + - "Hadoop" + order: 0 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 1 + - title: "JdbcCatalog: Use relational database" + description: + "Using a table in a relational database to manage Iceberg\ + \ tables through JDBC. 
Read more here. Supporting: PostgreSQL" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Jdbc" + enum: + - "Jdbc" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + jdbc_url: + title: "Jdbc url" + type: "string" + examples: + - "jdbc:postgresql://{host}:{port}/{database}" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please\ + \ select one of the connection modes." + type: "boolean" + default: false + order: 5 + catalog_schema: + title: "schema for Iceberg catalog" + description: + "Iceberg catalog metadata tables are written to catalog\ + \ schema. The usual value for this field is \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 6 + - title: "RESTCatalog" + description: + "The RESTCatalog connects to a REST server at the specified\ + \ URI" + required: + - "catalog_type" + - "rest_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Rest" + enum: + - "Rest" + order: 0 + rest_uri: + title: "REST Server URI" + type: "string" + examples: + - "http://localhost:12345" + order: 1 + rest_credential: + title: + "A credential to exchange for a token in the OAuth2 client\ + \ credentials flow." + type: "string" + airbyte_secret: true + examples: + - "username:password" + order: 2 + rest_token: + title: + "A Bearer token which will be used for interaction with the\ + \ server." 
+ type: "string" + airbyte_secret: true + examples: + - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + order: 3 + - title: "GlueCatalog" + description: "The GlueCatalog connects to a AWS Glue Catalog" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Glue" + enum: + - "Glue" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + order: 0 + storage_config: + title: "Storage config" + type: "object" + description: "Storage config of Iceberg." + oneOf: + - title: "S3" + type: "object" + description: "S3 object storage" + required: + - "storage_type" + - "access_key_id" + - "secret_access_key" + - "s3_warehouse_uri" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "S3" + enum: + - "S3" + order: 0 + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read\ + \ more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_warehouse_uri: + title: "S3 Warehouse Uri for Iceberg" + type: "string" + description: "The Warehouse Uri for Iceberg" + examples: + - "s3a://my-bucket/path/to/warehouse" + - "s3://my-bucket/path/to/warehouse" + order: 2 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + - "localhost:9000" + order: 4 + s3_path_style_access: + type: "boolean" + description: "Use path style access" + examples: + - true + - false + default: true + order: 5 + - title: "Server-managed" + type: "object" + description: "Server-managed object storage" + required: + - "storage_type" + - "managed_warehouse_name" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "MANAGED" + enum: + - "MANAGED" + order: 0 + managed_warehouse_name: + type: "string" + description: "The name of the managed warehouse" + title: "Warehouse name" + order: 0 + order: 1 + format_config: + title: "File format" + type: "object" + required: + - "format" + description: "File format of Iceberg storage." + properties: + format: + title: "File storage format" + type: "string" + default: "Parquet" + description: "" + enum: + - "Parquet" + - "Avro" + order: 0 + flush_batch_size: + title: "Data file flushing batch size" + description: + "Iceberg data file flush batch size. Incoming rows write\ + \ to cache firstly; When cache size reaches this 'batch size', flush\ + \ into real Iceberg data file." + type: "integer" + default: 10000 + order: 1 + auto_compact: + title: "Auto compact data files" + description: "Auto compact data files when stream close" + type: "boolean" + default: false + order: 2 + compact_target_file_size_in_mb: + title: "Target size of compacted data file" + description: + "Specify the target size of Iceberg data file when performing\ + \ a compaction action. " + type: "integer" + default: 100 + order: 3 + order: 2 + destination-sftp-json: + title: "Destination SFTP JSON" + type: "object" + required: + - "host" + - "username" + - "password" + - "destination_path" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the SFTP server." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the SFTP server." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - 22 + order: 1 + username: + title: "User" + description: "Username to use to access the SFTP server." + type: "string" + order: 2 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + destination_path: + title: "Destination path" + type: "string" + description: "Path to the directory where json files will be written." + examples: + - "/json_data" + order: 4 + destinationType: + title: "sftp-json" + const: "sftp-json" + enum: + - "sftp-json" + order: 0 + type: "string" + destination-sftp-json-update: + title: "Destination SFTP JSON" + type: "object" + required: + - "host" + - "username" + - "password" + - "destination_path" + properties: + host: + title: "Host" + description: "Hostname of the SFTP server." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the SFTP server." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - 22 + order: 1 + username: + title: "User" + description: "Username to use to access the SFTP server." + type: "string" + order: 2 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 3 + destination_path: + title: "Destination path" + type: "string" + description: "Path to the directory where json files will be written." + examples: + - "/json_data" + order: 4 + destination-s3: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "destinationType" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." 
+ title: "S3 Key ID" + airbyte_secret: true + always_show: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + always_show: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + role_arn: + type: "string" + description: "The Role ARN" + title: "Role ARN" + examples: + - "arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId" + order: 2 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 3 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 4 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 5 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. 
See here for more details" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." + oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." 
+ oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Avro" + default: "Avro" + order: 0 + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate Level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." 
+ type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + order: 1 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. 
Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + order: 6 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 7 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 8 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 9 + destinationType: + title: "s3" + const: "s3" + enum: + - "s3" + order: 0 + type: "string" + destination-s3-update: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + always_show: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read more here" + title: "S3 Access Key" + airbyte_secret: true + always_show: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + role_arn: + type: "string" + description: "The Role ARN" + title: "Role ARN" + examples: + - "arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId" + order: 2 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 3 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 4 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 5 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." 
+ default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." + oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Avro" + default: "Avro" + order: 0 + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate Level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + order: 1 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + order: 6 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 7 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. 
Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 8 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 9 + destination-redis: + title: "Redis Destination Spec" + type: "object" + required: + - "host" + - "username" + - "port" + - "cache_type" + - "destinationType" + properties: + host: + title: "Host" + description: "Redis host to connect to." + type: "string" + examples: + - "localhost,127.0.0.1" + order: 1 + port: + title: "Port" + description: "Port of Redis." + type: "integer" + minimum: 0 + maximum: 65536 + default: 6379 + order: 2 + username: + title: "Username" + description: "Username associated with Redis." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with Redis." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + type: "boolean" + description: + "Indicates whether SSL encryption protocol will be used to\ + \ connect to Redis. It is recommended to use SSL connection if possible." + default: false + order: 5 + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n
  • verify-full - This is\ + \ the most secure mode. Always require encryption and verifies the identity\ + \ of the source database server" + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + cache_type: + title: "Cache type" + type: "string" + default: "hash" + description: "Redis cache type to store data in." + enum: + - "hash" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "redis" + const: "redis" + enum: + - "redis" + order: 0 + type: "string" + destination-redis-update: + title: "Redis Destination Spec" + type: "object" + required: + - "host" + - "username" + - "port" + - "cache_type" + properties: + host: + title: "Host" + description: "Redis host to connect to." + type: "string" + examples: + - "localhost,127.0.0.1" + order: 1 + port: + title: "Port" + description: "Port of Redis." + type: "integer" + minimum: 0 + maximum: 65536 + default: 6379 + order: 2 + username: + title: "Username" + description: "Username associated with Redis." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with Redis." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + type: "boolean" + description: + "Indicates whether SSL encryption protocol will be used to\ + \ connect to Redis. It is recommended to use SSL connection if possible." + default: false + order: 5 + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n
  • verify-full - This is\ + \ the most secure mode. Always require encryption and verifies the identity\ + \ of the source database server" + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + cache_type: + title: "Cache type" + type: "string" + default: "hash" + description: "Redis cache type to store data in." + enum: + - "hash" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-typesense: + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + - "destinationType" + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: + "Hostname of the Typesense instance without protocol. Accept\ + \ multiple hosts separated by comma." + order: 1 + port: + title: "Port" + type: "string" + description: + "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: + "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "integer" + description: "How many documents should be imported together. Default 1000" + order: 4 + path: + title: "Path" + type: "string" + description: "Path of the Typesense instance. Default is none" + order: 5 + destinationType: + title: "typesense" + const: "typesense" + enum: + - "typesense" + order: 0 + type: "string" + destination-typesense-update: + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: + "Hostname of the Typesense instance without protocol. 
Accept\ + \ multiple hosts separated by comma." + order: 1 + port: + title: "Port" + type: "string" + description: + "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: + "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "integer" + description: "How many documents should be imported together. Default 1000" + order: 4 + path: + title: "Path" + type: "string" + description: "Path of the Typesense instance. Default is none" + order: 5 + destination-bigquery: + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_location" + - "dataset_id" + - "destinationType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset. Read more here." + title: "Project ID" + group: "connection" + order: 0 + dataset_location: + type: "string" + description: + "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied. Read more here." 
+ title: "Dataset Location" + group: "connection" + order: 1 + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-southwest1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "europe-west7" + - "europe-west8" + - "europe-west9" + - "europe-west12" + - "me-central1" + - "me-central2" + - "me-west1" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "southamerica-east1" + - "southamerica-west1" + - "us-central1" + - "us-east1" + - "us-east2" + - "us-east3" + - "us-east4" + - "us-east5" + - "us-south1" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + dataset_id: + type: "string" + description: + "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" + group: "connection" + order: 2 + loading_method: + type: "object" + title: "Loading Method" + description: "The way data will be uploaded to BigQuery." + display_type: "radio" + group: "connection" + order: 3 + oneOf: + - title: "Batched Standard Inserts" + required: + - "method" + description: + "Direct loading using batched SQL INSERT statements. This\ + \ method uses the BigQuery driver to convert large INSERT statements\ + \ into file uploads automatically." + properties: + method: + type: "string" + const: "Standard" + enum: + - "Standard" + - title: "GCS Staging" + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses COPY INTO to load your data into BigQuery." 
+ required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + enum: + - "GCS Staging" + credential: + title: "Credential" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read\ + \ more here." + type: "object" + order: 1 + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + order: 0 + enum: + - "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + order: 1 + x-speakeasy-param-sensitive: true + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + order: 2 + x-speakeasy-param-sensitive: true + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the GCS bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + gcs_bucket_path: + title: "GCS Bucket Path" + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + order: 3 + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. By this select you can chose if these records should\ + \ be removed from GCS when migration has finished. The default \"\ + Delete all tmp files from GCS\" value is used if not set explicitly." 
+ title: "GCS Tmp Files Afterward Processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + order: 4 + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Service Account Key JSON (Required for cloud, optional for open-source)" + airbyte_secret: true + group: "connection" + order: 4 + always_show: true + x-speakeasy-param-sensitive: true + transformation_priority: + type: "string" + description: + "Interactive run type means that the query is executed as soon\ + \ as possible, and these queries count towards concurrent rate limit and\ + \ daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources\ + \ are available in the BigQuery shared resource pool, which usually occurs\ + \ within a few minutes. Batch queries don’t count towards your concurrent\ + \ rate limit. Read more about batch queries here. The default \"interactive\" value is used if not set explicitly." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + order: 5 + group: "advanced" + big_query_client_buffer_size_mb: + title: "Google BigQuery Client Chunk Size" + description: + "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\ + \ 15) for each table. The size that will be written by a single RPC. Written\ + \ data will be buffered and only flushed upon reaching this size or closing\ + \ the channel. The default 15MB value is used if not set explicitly. Read\ + \ more here." 
+ type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + order: 6 + group: "advanced" + raw_data_dataset: + type: "string" + description: "The dataset to write raw tables into (default: airbyte_internal)" + title: "Raw Table Dataset Name" + order: 7 + group: "advanced" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + group: "advanced" + destinationType: + title: "bigquery" + const: "bigquery" + enum: + - "bigquery" + order: 0 + type: "string" + groups: + - id: "connection" + title: "Connection" + - id: "advanced" + title: "Advanced" + destination-bigquery-update: + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_location" + - "dataset_id" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset. Read more here." + title: "Project ID" + group: "connection" + order: 0 + dataset_location: + type: "string" + description: + "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied. Read more here." 
+ title: "Dataset Location" + group: "connection" + order: 1 + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-southwest1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "europe-west7" + - "europe-west8" + - "europe-west9" + - "europe-west12" + - "me-central1" + - "me-central2" + - "me-west1" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "southamerica-east1" + - "southamerica-west1" + - "us-central1" + - "us-east1" + - "us-east2" + - "us-east3" + - "us-east4" + - "us-east5" + - "us-south1" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + dataset_id: + type: "string" + description: + "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" + group: "connection" + order: 2 + loading_method: + type: "object" + title: "Loading Method" + description: "The way data will be uploaded to BigQuery." + display_type: "radio" + group: "connection" + order: 3 + oneOf: + - title: "Batched Standard Inserts" + required: + - "method" + description: + "Direct loading using batched SQL INSERT statements. This\ + \ method uses the BigQuery driver to convert large INSERT statements\ + \ into file uploads automatically." + properties: + method: + type: "string" + const: "Standard" + enum: + - "Standard" + - title: "GCS Staging" + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses COPY INTO to load your data into BigQuery." 
+ required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + enum: + - "GCS Staging" + credential: + title: "Credential" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read\ + \ more here." + type: "object" + order: 1 + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + order: 0 + enum: + - "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + order: 1 + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + order: 2 + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the GCS bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + gcs_bucket_path: + title: "GCS Bucket Path" + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + order: 3 + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. By this select you can chose if these records should\ + \ be removed from GCS when migration has finished. The default \"\ + Delete all tmp files from GCS\" value is used if not set explicitly." 
+ title: "GCS Tmp Files Afterward Processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + order: 4 + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Service Account Key JSON (Required for cloud, optional for open-source)" + airbyte_secret: true + group: "connection" + order: 4 + always_show: true + transformation_priority: + type: "string" + description: + "Interactive run type means that the query is executed as soon\ + \ as possible, and these queries count towards concurrent rate limit and\ + \ daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources\ + \ are available in the BigQuery shared resource pool, which usually occurs\ + \ within a few minutes. Batch queries don’t count towards your concurrent\ + \ rate limit. Read more about batch queries here. The default \"interactive\" value is used if not set explicitly." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + order: 5 + group: "advanced" + big_query_client_buffer_size_mb: + title: "Google BigQuery Client Chunk Size" + description: + "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\ + \ 15) for each table. The size that will be written by a single RPC. Written\ + \ data will be buffered and only flushed upon reaching this size or closing\ + \ the channel. The default 15MB value is used if not set explicitly. Read\ + \ more here." 
+ type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + order: 6 + group: "advanced" + raw_data_dataset: + type: "string" + description: "The dataset to write raw tables into (default: airbyte_internal)" + title: "Raw Table Dataset Name" + order: 7 + group: "advanced" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + group: "advanced" + groups: + - id: "connection" + title: "Connection" + - id: "advanced" + title: "Advanced" + destination-elasticsearch: + title: "Elasticsearch Connection Configuration" + type: "object" + required: + - "endpoint" + - "destinationType" + properties: + endpoint: + title: "Server Endpoint" + type: "string" + description: "The full url of the Elasticsearch server" + upsert: + type: "boolean" + title: "Upsert Records" + description: + "If a primary key identifier is defined in the source, an upsert\ + \ will be performed using the primary key value as the elasticsearch doc\ + \ id. Does not support composite primary keys." 
+ default: true + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + authenticationMethod: + title: "Authentication Method" + type: "object" + description: "The type of authentication to be used" + oneOf: + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + enum: + - "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + enum: + - "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + enum: + - "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "elasticsearch" + const: "elasticsearch" + enum: + - "elasticsearch" + order: 0 + type: "string" + destination-elasticsearch-update: + title: "Elasticsearch Connection Configuration" + type: "object" + required: + - "endpoint" + properties: + endpoint: + title: "Server Endpoint" + type: "string" + description: "The full url of the Elasticsearch server" + upsert: + type: "boolean" + title: "Upsert Records" + description: + "If a primary key identifier is defined in the source, an upsert\ + \ will be performed using the primary key value as the elasticsearch doc\ + \ id. Does not support composite primary keys." 
+ default: true + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + authenticationMethod: + title: "Authentication Method" + type: "object" + description: "The type of authentication to be used" + oneOf: + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + enum: + - "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + enum: + - "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." + type: "string" + airbyte_secret: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + enum: + - "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-azure-blob-storage: + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + - "destinationType" + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: + "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." 
+ airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + x-speakeasy-param-sensitive: true + azure_blob_storage_output_buffer_size: + title: "Azure Blob Storage output buffer size (Megabytes)" + type: "integer" + description: + "The amount of megabytes to buffer for the output stream to\ + \ Azure. This will impact memory footprint on workers, but may need adjustment\ + \ for performance and appropriate block size in Azure." + minimum: 1 + maximum: 2047 + default: 5 + examples: + - 5 + azure_blob_storage_spill_size: + title: "Azure Blob Storage file spill size" + type: "integer" + description: + "The amount of megabytes after which the connector should spill\ + \ the records in a new blob object. Make sure to configure size greater\ + \ than individual records. Enter 0 if not applicable" + default: 500 + examples: + - 500 + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + enum: + - "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + enum: + - "JSONL" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." 
+ destinationType: + title: "azure-blob-storage" + const: "azure-blob-storage" + enum: + - "azure-blob-storage" + order: 0 + type: "string" + destination-azure-blob-storage-update: + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: + "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + azure_blob_storage_output_buffer_size: + title: "Azure Blob Storage output buffer size (Megabytes)" + type: "integer" + description: + "The amount of megabytes to buffer for the output stream to\ + \ Azure. This will impact memory footprint on workers, but may need adjustment\ + \ for performance and appropriate block size in Azure." 
+ minimum: 1 + maximum: 2047 + default: 5 + examples: + - 5 + azure_blob_storage_spill_size: + title: "Azure Blob Storage file spill size" + type: "integer" + description: + "The amount of megabytes after which the connector should spill\ + \ the records in a new blob object. Make sure to configure size greater\ + \ than individual records. Enter 0 if not applicable" + default: 500 + examples: + - 500 + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + enum: + - "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + enum: + - "JSONL" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." 
+ destination-pgvector: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Postgres Connection" + type: "object" + properties: + host: + title: "Host" + description: "Enter the account name you want to use to access the database." 
+ order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + port: + title: "Port" + description: "Enter the port you want to use to access the database" + default: 5432 + order: 2 + examples: + - "5432" + type: "integer" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + default: "public" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + required: + - "host" + - "database" + - "username" + - "credentials" + description: "Postgres can be used to store vector data and retrieve embeddings." 
+ group: "indexing" + destinationType: + title: "pgvector" + const: "pgvector" + enum: + - "pgvector" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-pgvector-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Postgres Connection" + type: "object" + properties: + host: + title: "Host" + description: "Enter the account name you want to use to access the database." 
+ order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + port: + title: "Port" + description: "Enter the port you want to use to access the database" + default: 5432 + order: 2 + examples: + - "5432" + type: "integer" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + default: "public" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + required: + - "password" + required: + - "host" + - "database" + - "username" + - "credentials" + description: "Postgres can be used to store vector data and retrieve embeddings." + group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-yellowbrick: + title: "Yellowbrick Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." 
+ type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "yellowbrick" + const: "yellowbrick" + enum: + - "yellowbrick" + order: 0 + type: "string" + destination-yellowbrick-update: + title: "Yellowbrick Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." 
+ type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-postgres: + title: "Postgres Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." 
+ type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into" + title: "Raw table schema (defaults to airbyte_internal)" + order: 9 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 10 + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. 
WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 11 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "postgres" + const: "postgres" + enum: + - "postgres" + order: 0 + type: "string" + destination-postgres-update: + title: "Postgres Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into" + title: "Raw table schema (defaults to airbyte_internal)" + order: 9 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 10 + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. 
This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 11 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + oneOf: + - title: destination-google-sheets + $ref: "#/components/schemas/destination-google-sheets" + - title: destination-astra + $ref: "#/components/schemas/destination-astra" + - title: destination-aws-datalake + $ref: "#/components/schemas/destination-aws-datalake" + - title: destination-azure-blob-storage + $ref: "#/components/schemas/destination-azure-blob-storage" + - title: destination-bigquery + $ref: "#/components/schemas/destination-bigquery" + - title: destination-clickhouse + $ref: "#/components/schemas/destination-clickhouse" + - title: destination-convex + $ref: "#/components/schemas/destination-convex" + - title: destination-databricks + $ref: "#/components/schemas/destination-databricks" + - title: destination-dev-null + $ref: "#/components/schemas/destination-dev-null" + - title: destination-duckdb + $ref: "#/components/schemas/destination-duckdb" + - title: destination-dynamodb + $ref: "#/components/schemas/destination-dynamodb" + - title: destination-elasticsearch + $ref: "#/components/schemas/destination-elasticsearch" + - title: destination-firebolt + $ref: "#/components/schemas/destination-firebolt" + - title: destination-firestore + $ref: "#/components/schemas/destination-firestore" + - title: destination-gcs + $ref: "#/components/schemas/destination-gcs" + - title: destination-iceberg + $ref: "#/components/schemas/destination-iceberg" + - title: destination-milvus + $ref: "#/components/schemas/destination-milvus" + - title: destination-mongodb + $ref: "#/components/schemas/destination-mongodb" + - title: destination-mssql + $ref: "#/components/schemas/destination-mssql" + - title: destination-mysql + $ref: "#/components/schemas/destination-mysql" + - title: destination-oracle + $ref: "#/components/schemas/destination-oracle" + - title: destination-pgvector + $ref: "#/components/schemas/destination-pgvector" + - title: destination-pinecone + $ref: "#/components/schemas/destination-pinecone" + - title: 
destination-postgres + $ref: "#/components/schemas/destination-postgres" + - title: destination-pubsub + $ref: "#/components/schemas/destination-pubsub" + - title: destination-qdrant + $ref: "#/components/schemas/destination-qdrant" + - title: destination-redis + $ref: "#/components/schemas/destination-redis" + - title: destination-redshift + $ref: "#/components/schemas/destination-redshift" + - title: destination-s3 + $ref: "#/components/schemas/destination-s3" + - title: destination-s3-glue + $ref: "#/components/schemas/destination-s3-glue" + - title: destination-sftp-json + $ref: "#/components/schemas/destination-sftp-json" + - title: destination-snowflake + $ref: "#/components/schemas/destination-snowflake" + - title: destination-snowflake-cortex + $ref: "#/components/schemas/destination-snowflake-cortex" + - title: destination-teradata + $ref: "#/components/schemas/destination-teradata" + - title: destination-timeplus + $ref: "#/components/schemas/destination-timeplus" + - title: destination-typesense + $ref: "#/components/schemas/destination-typesense" + - title: destination-vectara + $ref: "#/components/schemas/destination-vectara" + - title: destination-weaviate + $ref: "#/components/schemas/destination-weaviate" + - title: destination-yellowbrick + $ref: "#/components/schemas/destination-yellowbrick" + SourceConfiguration: + description: The values required to configure the source. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml new file mode 100644 index 00000000000..8e8a8a8c1f6 --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_jobs.yaml @@ -0,0 +1,1312 @@ +--- +openapi: "3.1.0" +info: + title: "Jobs" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /jobs: + get: + tags: + - "public_jobs" + - "public" + - "Jobs" + parameters: + - name: "connectionId" + description: "Filter the Jobs by connectionId." + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of Jobs returned. The default is\ + \ 20 Jobs." + schema: + format: "int32" + default: 20 + maximum: 100 + minimum: 1 + type: "integer" + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning Jobs. The default\ + \ is 0." + schema: + format: "int32" + default: 0 + minimum: 0 + type: "integer" + in: "query" + - name: "jobType" + description: "Filter the Jobs by jobType." + schema: + $ref: "#/components/schemas/JobTypeEnum" + in: "query" + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list jobs for. Empty\ + \ list will retrieve all allowed workspaces." 
+ schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "status" + description: "The Job status you want to filter by" + schema: + $ref: "#/components/schemas/JobStatusEnum" + in: "query" + required: false + - name: "createdAtStart" + description: "The start date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "createdAtEnd" + description: "The end date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "updatedAtStart" + description: "The start date to filter by" + schema: + type: "string" + format: "date-time" + example: 1687450500000 + in: "query" + required: false + - name: "updatedAtEnd" + description: "The end date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "orderBy" + description: "The field and method to use for ordering" + schema: + type: "string" + pattern: "\\w+|(ASC|DESC)" + in: "query" + required: false + example: "updatedAt|DESC" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobsResponse" + examples: + Job List Response Example: + value: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + description: "List all the Jobs by connectionId." 
+ "403": + description: "Not allowed" + operationId: "listJobs" + summary: "List Jobs by sync type" + x-speakeasy-alias: "listJobs" + x-speakeasy-group: "Jobs" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/JobCreateRequest" + examples: + Job Creation Request Example: + value: + connectionId: "e735894a-e773-4938-969f-45f53957b75b" + jobType: "sync" + required: true + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Creation Response Example: + value: + jobId: 1234 + status: "running" + jobType: "sync" + description: + "Kicks off a new Job based on the JobType. The connectionId\ + \ is the resource that Job will be run for." + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createJob" + summary: "Trigger a sync or reset job of a connection" + x-speakeasy-alias: "createJob" + x-speakeasy-group: "Jobs" + /jobs/{jobId}: + get: + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Get Response Example: + value: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + description: "Get a Job by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getJob" + x-speakeasy-alias: "getJob" + x-speakeasy-group: "Jobs" + summary: "Get Job status and details" + delete: + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + description: "Cancel a Job." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "cancelJob" + x-speakeasy-alias: "cancelJob" + x-speakeasy-group: "Jobs" + summary: "Cancel a running Job" + parameters: + - name: "jobId" + schema: + format: "int64" + type: "integer" + in: "path" + required: true +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." + x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." 
+ enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." + type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." 
+ x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single job." 
+ required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml new file mode 100644 index 00000000000..5cab8a664bf --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_organizations.yaml @@ -0,0 +1,1122 @@ +--- +openapi: "3.1.0" +info: + title: "Organizations" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /organizations: + get: + tags: + - "public_organizations" + - "public" + - "Organizations" + summary: "List all organizations for a user" + description: "Lists users organizations." + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationsResponse" + description: "List user's organizations." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listOrganizationsForUser" + x-speakeasy-alias: "listOrganizationsForUser" + x-speakeasy-group: "Organizations" +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." 
+ x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml new file mode 100644 index 00000000000..251d692f503 --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_permissions.yaml @@ -0,0 +1,1258 @@ +--- +openapi: "3.1.0" +info: + title: "Permissions" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /permissions/{permissionId}: + parameters: + - name: "permissionId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + get: + tags: + - "public_permissions" + - "public" + - "Permissions" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: "Get a Permission by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "getPermission" + summary: "Get Permission details" + x-speakeasy-alias: "getPermission" + x-speakeasy-group: "Permissions" + x-speakeasy-entity-operation: "Permission#read" + patch: + tags: + - "public_permissions" + - "public" + - "Permissions" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionUpdateRequest" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: "Successful updated" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "updatePermission" + summary: "Update a permission" + x-speakeasy-alias: "updatePermission" + x-speakeasy-group: "Permissions" + x-speakeasy-entity-operation: "Permission#update" + delete: + tags: + - "public_permissions" + - "public" + - "Permissions" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "deletePermission" + x-speakeasy-alias: "deletePermission" + x-speakeasy-group: "Permissions" + summary: "Delete a Permission" + x-speakeasy-entity-operation: "Permission#delete" + /permissions: + get: + tags: + - "public_permissions" + - "public" + - "Permissions" + parameters: + - name: "userId" + description: "User Id in permission." + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "organizationId" + description: + "This is required if you want to read someone else's permissions,\ + \ and you should have organization admin or a higher role." 
+ schema: + format: "UUID" + type: "string" + in: "query" + required: false + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionsResponse" + description: "List Permissions." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listPermissions" + x-speakeasy-alias: "listPermissions" + x-speakeasy-group: "Permissions" + summary: "List Permissions by user id" + post: + tags: + - "public_permissions" + - "public" + - "Permissions" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionCreateRequest" + examples: + Permission Creation Request Example: + value: + permissionType: "workspace_admin" + userId: "7d08fd6c-531e-4a00-937e-3d355f253e63" + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + examples: + Permission Creation Response Example: + value: + permissionId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + permissionType: "workspace_admin" + userId: "7d08fd6c-531e-4a00-937e-3d355f253e63" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createPermission" + x-speakeasy-alias: "createPermission" + x-speakeasy-group: "Permissions" + summary: "Create a permission" + x-speakeasy-entity-operation: "Permission#create" +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." 
+ x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml new file mode 100644 index 00000000000..d91849fa35e --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_sources.yaml @@ -0,0 +1,45484 @@ +--- +openapi: "3.1.0" +info: + title: "Sources" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /sources: + get: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourcesResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listSources" + x-speakeasy-alias: "listSources" + x-speakeasy-group: "Sources" + summary: "List sources" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list sources for. Empty\ + \ list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + example: "df08f6b0-b364-4cc1-9b3f-96f5d2fccfb2,b0796797-de23-4fc7-a5e2-7e131314718c" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted sources in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of sources returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning sources. 
The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCreateRequest" + examples: + Source Creation Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Creation Response Example: + value: + sourceId: "0c31738c-0b2d-4887-b506-e2cd1c39cc35" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSource" + x-speakeasy-alias: "createSource" + x-speakeasy-group: "Sources" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + /sources/{sourceId}: + get: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Get Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSource" + x-speakeasy-alias: "getSource" + x-speakeasy-group: "Sources" + summary: "Get Source details" + patch: + tags: + - "public_sources" + - "public" + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePatchRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a Source" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchSource" + x-speakeasy-alias: "patchSource" + x-speakeasy-group: "Sources" + summary: "Update a Source" + put: + tags: + - "public_sources" + - "public" + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePutRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a 
source and fully overwrite it" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSource" + x-speakeasy-alias: "putSource" + x-speakeasy-group: "Sources" + summary: "Update a Source and fully overwrite it" + x-speakeasy-entity-operation: "Source#update" + delete: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSource" + x-speakeasy-alias: "deleteSource" + x-speakeasy-group: "Sources" + summary: "Delete a Source" + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources/initiateOAuth: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/InitiateOauthRequest" + required: true + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + $ref: "#/components/responses/InitiateOauthResponse" + "400": + description: "A field in the body has not been set appropriately." + "403": + description: "API key is invalid." + operationId: "initiateOAuth" + x-speakeasy-alias: "initiateOAuth" + x-speakeasy-group: "Sources" + summary: "Initiate OAuth for a source" + description: + "Given a source ID, workspace ID, and redirect URL, initiates OAuth\ + \ for the source.\n\nThis returns a fully formed URL for performing user authentication\ + \ against the relevant source identity provider (IdP). Once authentication\ + \ has been completed, the IdP will redirect to an Airbyte endpoint which will\ + \ save the access and refresh tokens off as a secret and return the secret\ + \ ID to the redirect URL specified in the `secret_id` query string parameter.\n\ + \nThat secret ID can be used to create a source with credentials in place\ + \ of actual tokens." 
+components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." + x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. 
Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." + type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: 
"object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: 
"#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + source-trello: + type: "object" + required: + - "key" + - "token" + - "start_date" + - "sourceType" + properties: + key: + type: "string" + title: "API key" + description: + "Trello API key. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + token: + type: "string" + title: "API token" + description: + "Trello API token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 2 + board_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9a-fA-F]{24}$" + title: "Trello Board IDs" + description: + "IDs of the boards to replicate data from. If left empty, data\ + \ from all boards to which you have access will be replicated. Please\ + \ note that this is not the 8-character ID in the board's shortLink (URL\ + \ of the board). Rather, what is required here is the 24-character ID\ + \ usually returned by the API" + order: 3 + sourceType: + title: "trello" + const: "trello" + enum: + - "trello" + order: 0 + type: "string" + source-trello-update: + type: "object" + required: + - "key" + - "token" + - "start_date" + properties: + key: + type: "string" + title: "API key" + description: + "Trello API key. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 0 + token: + type: "string" + title: "API token" + description: + "Trello API token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 2 + board_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9a-fA-F]{24}$" + title: "Trello Board IDs" + description: + "IDs of the boards to replicate data from. If left empty, data\ + \ from all boards to which you have access will be replicated. Please\ + \ note that this is not the 8-character ID in the board's shortLink (URL\ + \ of the board). Rather, what is required here is the 24-character ID\ + \ usually returned by the API" + order: 3 + source-the-guardian-api: + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + description: + "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: + "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: + "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." 
+ examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: + "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: + "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + sourceType: + title: "the-guardian-api" + const: "the-guardian-api" + enum: + - "the-guardian-api" + order: 0 + type: "string" + source-the-guardian-api-update: + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + description: + "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: + "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: + "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. 
Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." + examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: + "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: + "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + source-harvest: + title: "Harvest Spec" + type: "object" + required: + - "account_id" + - "replication_start_date" + - "sourceType" + properties: + account_id: + title: "Account ID" + description: + "Harvest account ID. Required for all Harvest requests in pair\ + \ with Personal Access Token" + airbyte_secret: true + type: "string" + order: 0 + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + order: 1 + format: "date-time" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." 
+ type: "object" + order: 3 + oneOf: + - type: "object" + title: "Authenticate via Harvest (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_token: + title: "Personal Access Token" + description: + "Log into Harvest and then create new personal access token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "harvest" + const: "harvest" + enum: + - "harvest" + order: 0 + type: "string" + source-harvest-update: + title: "Harvest Spec" + type: "object" + required: + - "account_id" + - "replication_start_date" + properties: + account_id: + title: "Account ID" + description: + "Harvest account ID. Required for all Harvest requests in pair\ + \ with Personal Access Token" + airbyte_secret: true + type: "string" + order: 0 + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + order: 1 + format: "date-time" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." + type: "object" + order: 3 + oneOf: + - type: "object" + title: "Authenticate via Harvest (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_token: + title: "Personal Access Token" + description: + "Log into Harvest and then create new personal access token." 
+ type: "string" + airbyte_secret: true + source-yotpo: + type: "object" + required: + - "access_token" + - "app_key" + - "start_date" + - "email" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\ + \ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)" + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + app_key: + type: "string" + description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)" + title: "App Key" + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." + title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + email: + type: "string" + description: "Email address registered with yotpo." + title: "Registered email address" + default: "example@gmail.com" + order: 3 + sourceType: + title: "yotpo" + const: "yotpo" + enum: + - "yotpo" + order: 0 + type: "string" + source-yotpo-update: + type: "object" + required: + - "access_token" + - "app_key" + - "start_date" + - "email" + properties: + access_token: + type: "string" + description: + "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\ + \ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)" + title: "Access Token" + airbyte_secret: true + order: 0 + app_key: + type: "string" + description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)" + title: "App Key" + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." 
+ title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + email: + type: "string" + description: "Email address registered with yotpo." + title: "Registered email address" + default: "example@gmail.com" + order: 3 + source-prestashop: + title: "PrestaShop Spec" + type: "object" + required: + - "access_key" + - "url" + - "start_date" + - "sourceType" + properties: + access_key: + type: "string" + title: "Access Key" + description: + "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + format: "date" + order: 2 + sourceType: + title: "prestashop" + const: "prestashop" + enum: + - "prestashop" + order: 0 + type: "string" + source-prestashop-update: + title: "PrestaShop Spec" + type: "object" + required: + - "access_key" + - "url" + - "start_date" + properties: + access_key: + type: "string" + title: "Access Key" + description: + "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 + airbyte_secret: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + format: "date" + order: 2 + source-netsuite: + title: "Netsuite Spec" + type: "object" + required: + - "realm" + - "consumer_key" + - "consumer_secret" + - "token_key" + - "token_secret" + - "start_datetime" + - "sourceType" + properties: + realm: + type: "string" + title: "Realm (Account Id)" + description: + "Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1,\ + \ as for the `sandbox`" + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + consumer_key: + type: "string" + title: "Consumer Key" + description: "Consumer key associated with your integration" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + consumer_secret: + type: "string" + title: "Consumer Secret" + description: "Consumer secret associated with your integration" + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_key: + type: "string" + title: "Token Key (Token Id)" + description: "Access token key" + order: 3 + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_secret: + type: "string" + title: "Token Secret" + description: "Access token secret" + order: 4 + airbyte_secret: true + x-speakeasy-param-sensitive: true + object_types: + type: "array" + title: "Object Types" + items: + type: "string" + description: + "The API names of the Netsuite objects you want to sync. Setting\ + \ this speeds up the connection setup process by limiting the number of\ + \ schemas that need to be retrieved from Netsuite." 
+ order: 5 + examples: + - "customer" + - "salesorder" + - "etc" + default: [] + start_datetime: + type: "string" + title: "Start Date" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DDTHH:mm:ssZ\"" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + window_in_days: + type: "integer" + title: "Window in Days" + description: + "The amount of days used to query the data with date chunks.\ + \ Set smaller value, if you have lots of data." + order: 7 + default: 30 + sourceType: + title: "netsuite" + const: "netsuite" + enum: + - "netsuite" + order: 0 + type: "string" + source-netsuite-update: + title: "Netsuite Spec" + type: "object" + required: + - "realm" + - "consumer_key" + - "consumer_secret" + - "token_key" + - "token_secret" + - "start_datetime" + properties: + realm: + type: "string" + title: "Realm (Account Id)" + description: + "Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1,\ + \ as for the `sandbox`" + order: 0 + airbyte_secret: true + consumer_key: + type: "string" + title: "Consumer Key" + description: "Consumer key associated with your integration" + order: 1 + airbyte_secret: true + consumer_secret: + type: "string" + title: "Consumer Secret" + description: "Consumer secret associated with your integration" + order: 2 + airbyte_secret: true + token_key: + type: "string" + title: "Token Key (Token Id)" + description: "Access token key" + order: 3 + airbyte_secret: true + token_secret: + type: "string" + title: "Token Secret" + description: "Access token secret" + order: 4 + airbyte_secret: true + object_types: + type: "array" + title: "Object Types" + items: + type: "string" + description: + "The API names of the Netsuite objects you want to sync. Setting\ + \ this speeds up the connection setup process by limiting the number of\ + \ schemas that need to be retrieved from Netsuite." 
+ order: 5 + examples: + - "customer" + - "salesorder" + - "etc" + default: [] + start_datetime: + type: "string" + title: "Start Date" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DDTHH:mm:ssZ\"" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + window_in_days: + type: "integer" + title: "Window in Days" + description: + "The amount of days used to query the data with date chunks.\ + \ Set smaller value, if you have lots of data." + order: 7 + default: 30 + source-convex: + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + - "sourceType" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "convex" + const: "convex" + enum: + - "convex" + order: 0 + type: "string" + source-convex-update: + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." + airbyte_secret: true + source-recurly: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recurly API Key. See the docs for more information on how to generate this key." 
+ order: 0 + x-speakeasy-param-sensitive: true + begin_time: + type: "string" + description: + "ISO8601 timestamp from which the replication from Recurly\ + \ API will start from." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 1 + end_time: + type: "string" + description: + "ISO8601 timestamp to which the replication from Recurly API\ + \ will stop. Records after that date won't be imported." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 2 + sourceType: + title: "recurly" + const: "recurly" + enum: + - "recurly" + order: 0 + type: "string" + source-recurly-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recurly API Key. See the docs for more information on how to generate this key." + order: 0 + begin_time: + type: "string" + description: + "ISO8601 timestamp from which the replication from Recurly\ + \ API will start from." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 1 + end_time: + type: "string" + description: + "ISO8601 timestamp to which the replication from Recurly API\ + \ will stop. Records after that date won't be imported." 
+ examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 2 + source-pennylane: + type: "object" + required: + - "start_time" + - "api_key" + - "sourceType" + properties: + start_time: + type: "string" + order: 0 + title: "Start time" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "pennylane" + const: "pennylane" + enum: + - "pennylane" + order: 0 + type: "string" + source-pennylane-update: + type: "object" + required: + - "start_time" + - "api_key" + properties: + start_time: + type: "string" + order: 0 + title: "Start time" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + source-teamwork: + type: "object" + required: + - "username" + - "site_name" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + site_name: + type: "string" + description: "The teamwork site name appearing at the url" + order: 2 + title: "Site Name" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "teamwork" + const: "teamwork" + enum: + - "teamwork" + order: 0 + type: "string" + source-teamwork-update: + type: "object" + required: + - "username" + - "site_name" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + site_name: + type: "string" + description: "The teamwork 
site name appearing at the url" + order: 2 + title: "Site Name" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-zendesk-chat: + title: "Zendesk Chat Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Chat API, in the format YYYY-MM-DDT00:00:00Z." + examples: + - "2021-02-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + subdomain: + type: "string" + title: "Subdomain" + description: + "Required if you access Zendesk Chat from a Zendesk Support\ + \ subdomain." + default: "" + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "credentials" + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access Token" + required: + - "credentials" + - "access_token" + properties: + credentials: + type: "string" + const: "access_token" + order: 0 + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: "The Access Token to make authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zendesk-chat" + const: "zendesk-chat" + enum: + - "zendesk-chat" + order: 0 + type: "string" + source-zendesk-chat-update: + title: "Zendesk Chat Spec" + type: "object" + required: + - "start_date" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Chat API, in the format YYYY-MM-DDT00:00:00Z." + examples: + - "2021-02-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + subdomain: + type: "string" + title: "Subdomain" + description: + "Required if you access Zendesk Chat from a Zendesk Support\ + \ subdomain." + default: "" + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "credentials" + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." 
+ airbyte_secret: true + - type: "object" + title: "Access Token" + required: + - "credentials" + - "access_token" + properties: + credentials: + type: "string" + const: "access_token" + order: 0 + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: "The Access Token to make authenticated requests." + airbyte_secret: true + source-when-i-work: + type: "object" + required: + - "email" + - "password" + - "sourceType" + properties: + email: + type: "string" + description: "Email of your when-i-work account" + title: "Email" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + password: + type: "string" + description: "Password for your when-i-work account" + title: "Password" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "when-i-work" + const: "when-i-work" + enum: + - "when-i-work" + order: 0 + type: "string" + source-when-i-work-update: + type: "object" + required: + - "email" + - "password" + properties: + email: + type: "string" + description: "Email of your when-i-work account" + title: "Email" + airbyte_secret: true + order: 0 + password: + type: "string" + description: "Password for your when-i-work account" + title: "Password" + airbyte_secret: true + order: 1 + source-my-hours: + title: "My Hours Spec" + type: "object" + required: + - "email" + - "password" + - "start_date" + - "sourceType" + properties: + email: + title: "Email" + type: "string" + description: "Your My Hours username" + example: "john@doe.com" + password: + title: "Password" + type: "string" + description: "The password associated to the username" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: "Start date for collecting time logs" + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: "Time logs batch size" + description: "Pagination size used 
for retrieving logs in days" + examples: + - 30 + type: "integer" + minimum: 1 + maximum: 365 + default: 30 + sourceType: + title: "my-hours" + const: "my-hours" + enum: + - "my-hours" + order: 0 + type: "string" + source-my-hours-update: + title: "My Hours Spec" + type: "object" + required: + - "email" + - "password" + - "start_date" + properties: + email: + title: "Email" + type: "string" + description: "Your My Hours username" + example: "john@doe.com" + password: + title: "Password" + type: "string" + description: "The password associated to the username" + airbyte_secret: true + start_date: + title: "Start Date" + description: "Start date for collecting time logs" + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: "Time logs batch size" + description: "Pagination size used for retrieving logs in days" + examples: + - 30 + type: "integer" + minimum: 1 + maximum: 365 + default: 30 + source-7shifts: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token to use for authentication. Generate it in the\ + \ 7shifts Developer Tools." + name: "access_token" + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "7shifts" + const: "7shifts" + enum: + - "7shifts" + order: 0 + type: "string" + source-7shifts-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Access token to use for authentication. Generate it in the\ + \ 7shifts Developer Tools." 
+ name: "access_token" + title: "Access Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-eventbrite: + type: "object" + required: + - "private_token" + - "start_date" + - "sourceType" + properties: + private_token: + type: "string" + description: "The private token to use for authenticating API requests." + name: "private_token" + order: 0 + title: "Private Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "eventbrite" + const: "eventbrite" + enum: + - "eventbrite" + order: 0 + type: "string" + source-eventbrite-update: + type: "object" + required: + - "private_token" + - "start_date" + properties: + private_token: + type: "string" + description: "The private token to use for authenticating API requests." + name: "private_token" + order: 0 + title: "Private Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-klaviyo: + title: "Klaviyo Spec" + type: "object" + properties: + api_key: + type: "string" + title: "Api Key" + description: + "Klaviyo API Key. See our docs if you need help finding this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This field is optional\ + \ - if not provided, all data will be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 1 + disable_fetching_predictive_analytics: + type: "boolean" + title: "Disable Fetching Predictive Analytics" + description: + "Certain streams like the profiles stream can retrieve predictive\ + \ analytics data from Klaviyo's API. However, at high volume, this can\ + \ lead to service availability issues on the API which can be improved\ + \ by not fetching this field. WARNING: Enabling this setting will stop\ + \ the \"predictive_analytics\" column from being populated in your downstream\ + \ destination." + order: 2 + sourceType: + title: "klaviyo" + const: "klaviyo" + enum: + - "klaviyo" + order: 0 + type: "string" + required: + - "api_key" + - "sourceType" + source-klaviyo-update: + title: "Klaviyo Spec" + type: "object" + properties: + api_key: + type: "string" + title: "Api Key" + description: + "Klaviyo API Key. See our docs if you need help finding this key." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This field is optional\ + \ - if not provided, all data will be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 1 + disable_fetching_predictive_analytics: + type: "boolean" + title: "Disable Fetching Predictive Analytics" + description: + "Certain streams like the profiles stream can retrieve predictive\ + \ analytics data from Klaviyo's API. However, at high volume, this can\ + \ lead to service availability issues on the API which can be improved\ + \ by not fetching this field. WARNING: Enabling this setting will stop\ + \ the \"predictive_analytics\" column from being populated in your downstream\ + \ destination." 
+ order: 2 + required: + - "api_key" + source-datadog: + type: "object" + required: + - "api_key" + - "application_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Datadog API key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + application_key: + type: "string" + description: "Datadog application key" + order: 1 + title: "Application Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + description: + "The search query. This just applies to Incremental syncs.\ + \ If empty, it'll collect all logs." + order: 2 + title: "Query" + start_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This just applies to Incremental\ + \ syncs." + order: 3 + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2023-12-01T00:00:00Z" + site: + type: "string" + description: "The site where Datadog data resides in." + enum: + - "datadoghq.com" + - "us3.datadoghq.com" + - "us5.datadoghq.com" + - "datadoghq.eu" + - "ddog-gov.com" + order: 4 + title: "Site" + default: "datadoghq.com" + end_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ + \ after this date will not be replicated. An empty value will represent\ + \ the current datetime for each execution. This just applies to Incremental\ + \ syncs." + order: 5 + title: "End date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2024-01-01T00:00:00Z" + max_records_per_request: + type: "integer" + description: "Maximum number of records to collect per request." 
+ order: 6 + title: "Max records per requests" + default: 5000 + maximum: 5000 + minimum: 1 + queries: + type: "array" + description: "List of queries to be run and used as inputs." + items: + type: "object" + required: + - "name" + - "data_source" + - "query" + properties: + name: + type: "string" + description: "The variable name for use in queries." + order: 1 + title: "Query Name" + query: + type: "string" + description: "A classic query string." + order: 3 + title: "Query" + data_source: + type: "string" + description: "A data source that is powered by the platform." + enum: + - "metrics" + - "cloud_cost" + - "logs" + - "rum" + order: 2 + title: "Data Source" + order: 7 + title: "Queries" + default: [] + sourceType: + title: "datadog" + const: "datadog" + enum: + - "datadog" + order: 0 + type: "string" + source-datadog-update: + type: "object" + required: + - "api_key" + - "application_key" + properties: + api_key: + type: "string" + description: "Datadog API key" + order: 0 + title: "API Key" + airbyte_secret: true + application_key: + type: "string" + description: "Datadog application key" + order: 1 + title: "Application Key" + airbyte_secret: true + query: + type: "string" + description: + "The search query. This just applies to Incremental syncs.\ + \ If empty, it'll collect all logs." + order: 2 + title: "Query" + start_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This just applies to Incremental\ + \ syncs." + order: 3 + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2023-12-01T00:00:00Z" + site: + type: "string" + description: "The site where Datadog data resides in." 
+ enum: + - "datadoghq.com" + - "us3.datadoghq.com" + - "us5.datadoghq.com" + - "datadoghq.eu" + - "ddog-gov.com" + order: 4 + title: "Site" + default: "datadoghq.com" + end_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ + \ after this date will not be replicated. An empty value will represent\ + \ the current datetime for each execution. This just applies to Incremental\ + \ syncs." + order: 5 + title: "End date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2024-01-01T00:00:00Z" + max_records_per_request: + type: "integer" + description: "Maximum number of records to collect per request." + order: 6 + title: "Max records per requests" + default: 5000 + maximum: 5000 + minimum: 1 + queries: + type: "array" + description: "List of queries to be run and used as inputs." + items: + type: "object" + required: + - "name" + - "data_source" + - "query" + properties: + name: + type: "string" + description: "The variable name for use in queries." + order: 1 + title: "Query Name" + query: + type: "string" + description: "A classic query string." + order: 3 + title: "Query" + data_source: + type: "string" + description: "A data source that is powered by the platform." + enum: + - "metrics" + - "cloud_cost" + - "logs" + - "rum" + order: 2 + title: "Data Source" + order: 7 + title: "Queries" + default: [] + source-luma: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Get your API key on lu.ma Calendars dashboard → Settings." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "luma" + const: "luma" + enum: + - "luma" + order: 0 + type: "string" + source-luma-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "Get your API key on lu.ma Calendars dashboard → Settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-dockerhub: + type: "object" + required: + - "docker_username" + - "sourceType" + properties: + docker_username: + type: "string" + order: 0 + title: "Docker Username" + description: + "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + sourceType: + title: "dockerhub" + const: "dockerhub" + enum: + - "dockerhub" + order: 0 + type: "string" + source-dockerhub-update: + type: "object" + required: + - "docker_username" + properties: + docker_username: + type: "string" + order: 0 + title: "Docker Username" + description: + "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + source-webflow: + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + - "sourceType" + properties: + site_id: + title: "Site id" + type: "string" + description: + "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. 
See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + accept_version: + title: "Accept Version" + type: "string" + description: "The version of the Webflow API to use. See https://developers.webflow.com/#versioning" + example: "1.0.0" + order: 2 + sourceType: + title: "webflow" + const: "webflow" + enum: + - "webflow" + order: 0 + type: "string" + source-webflow-update: + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + properties: + site_id: + title: "Site id" + type: "string" + description: + "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + accept_version: + title: "Accept Version" + type: "string" + description: "The version of the Webflow API to use. 
See https://developers.webflow.com/#versioning" + example: "1.0.0" + order: 2 + source-scryfall: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "scryfall" + const: "scryfall" + enum: + - "scryfall" + order: 0 + type: "string" + source-scryfall-update: + type: "object" + required: [] + properties: {} + source-beamer: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "beamer" + const: "beamer" + enum: + - "beamer" + order: 0 + type: "string" + source-beamer-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-high-level: + type: "object" + required: + - "location_id" + - "api_key" + - "start_date" + - "sourceType" + properties: + location_id: + type: "string" + order: 0 + title: "Location ID" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "high-level" + const: "high-level" + enum: + - "high-level" + order: 0 + type: "string" + source-high-level-update: + type: "object" + required: + - "location_id" + - "api_key" + - "start_date" + properties: + location_id: + type: "string" + order: 0 + title: "Location ID" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true 
+ start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-wikipedia-pageviews: + type: "object" + required: + - "access" + - "agent" + - "article" + - "country" + - "end" + - "project" + - "start" + - "sourceType" + properties: + access: + type: "string" + title: "Access" + description: + "If you want to filter by access method, use one of desktop,\ + \ mobile-app or mobile-web. If you are interested in pageviews regardless\ + \ of access method, use all-access." + examples: + - "all-access" + - "desktop" + - "mobile-app" + - "mobile-web" + order: 0 + agent: + type: "string" + title: "Agent" + description: + "If you want to filter by agent type, use one of user, automated\ + \ or spider. If you are interested in pageviews regardless of agent type,\ + \ use all-agents." + examples: + - "all-agents" + - "user" + - "spider" + - "automated" + order: 1 + article: + type: "string" + title: "Article" + description: + "The title of any article in the specified project. Any spaces\ + \ should be replaced with underscores. It also should be URI-encoded,\ + \ so that non-URI-safe characters like %, / or ? are accepted." + examples: + - "Are_You_the_One%3F" + order: 2 + country: + type: "string" + title: "Country" + description: + "The ISO 3166-1 alpha-2 code of a country for which to retrieve\ + \ top articles." + examples: + - "FR" + - "IN" + order: 3 + end: + type: "string" + title: "End" + description: + "The date of the last day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format." + order: 4 + project: + type: "string" + title: "Project" + description: + "If you want to filter by project, use the domain of any Wikimedia\ + \ project." + examples: + - "en.wikipedia.org" + - "www.mediawiki.org" + - "commons.wikimedia.org" + order: 5 + start: + type: "string" + title: "Start" + description: + "The date of the first day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format. 
Also serves as the date to retrieve data for the top articles." + order: 6 + sourceType: + title: "wikipedia-pageviews" + const: "wikipedia-pageviews" + enum: + - "wikipedia-pageviews" + order: 0 + type: "string" + source-wikipedia-pageviews-update: + type: "object" + required: + - "access" + - "agent" + - "article" + - "country" + - "end" + - "project" + - "start" + properties: + access: + type: "string" + title: "Access" + description: + "If you want to filter by access method, use one of desktop,\ + \ mobile-app or mobile-web. If you are interested in pageviews regardless\ + \ of access method, use all-access." + examples: + - "all-access" + - "desktop" + - "mobile-app" + - "mobile-web" + order: 0 + agent: + type: "string" + title: "Agent" + description: + "If you want to filter by agent type, use one of user, automated\ + \ or spider. If you are interested in pageviews regardless of agent type,\ + \ use all-agents." + examples: + - "all-agents" + - "user" + - "spider" + - "automated" + order: 1 + article: + type: "string" + title: "Article" + description: + "The title of any article in the specified project. Any spaces\ + \ should be replaced with underscores. It also should be URI-encoded,\ + \ so that non-URI-safe characters like %, / or ? are accepted." + examples: + - "Are_You_the_One%3F" + order: 2 + country: + type: "string" + title: "Country" + description: + "The ISO 3166-1 alpha-2 code of a country for which to retrieve\ + \ top articles." + examples: + - "FR" + - "IN" + order: 3 + end: + type: "string" + title: "End" + description: + "The date of the last day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format." + order: 4 + project: + type: "string" + title: "Project" + description: + "If you want to filter by project, use the domain of any Wikimedia\ + \ project." 
+ examples: + - "en.wikipedia.org" + - "www.mediawiki.org" + - "commons.wikimedia.org" + order: 5 + start: + type: "string" + title: "Start" + description: + "The date of the first day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format. Also serves as the date to retrieve data for the top articles." + order: 6 + source-google-directory: + title: "Google Directory Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Google Credentials" + description: + "Google APIs use the OAuth 2.0 protocol for authentication\ + \ and authorization. The Source supports Web server application and Service accounts scenarios." + type: "object" + oneOf: + - title: "Sign in via Google (OAuth)" + description: + "For these scenario user only needs to give permission to\ + \ read Google Directory data." + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Web server app" + order: 0 + enum: + - "Web server app" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of the developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client secret" + type: "string" + description: "The Client Secret of the developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The Token for obtaining a new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Service Account Key" + description: + "For these scenario user should obtain service account's\ + \ credentials from the Google API Console and provide delegated email." 
+ type: "object" + required: + - "credentials_json" + - "email" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Service accounts" + order: 0 + enum: + - "Service accounts" + credentials_json: + type: "string" + title: "Credentials JSON" + description: + "The contents of the JSON service account key. See the\ + \ docs for more information on how to generate this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + type: "string" + title: "Email" + description: + "The email of the user, which has permissions to access\ + \ the Google Workspace Admin APIs." + sourceType: + title: "google-directory" + const: "google-directory" + enum: + - "google-directory" + order: 0 + type: "string" + source-google-directory-update: + title: "Google Directory Spec" + type: "object" + required: [] + properties: + credentials: + title: "Google Credentials" + description: + "Google APIs use the OAuth 2.0 protocol for authentication\ + \ and authorization. The Source supports Web server application and Service accounts scenarios." + type: "object" + oneOf: + - title: "Sign in via Google (OAuth)" + description: + "For these scenario user only needs to give permission to\ + \ read Google Directory data." + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Web server app" + order: 0 + enum: + - "Web server app" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of the developer application." + airbyte_secret: true + client_secret: + title: "Client secret" + type: "string" + description: "The Client Secret of the developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The Token for obtaining a new access token." 
+ airbyte_secret: true + - title: "Service Account Key" + description: + "For these scenario user should obtain service account's\ + \ credentials from the Google API Console and provide delegated email." + type: "object" + required: + - "credentials_json" + - "email" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Service accounts" + order: 0 + enum: + - "Service accounts" + credentials_json: + type: "string" + title: "Credentials JSON" + description: + "The contents of the JSON service account key. See the\ + \ docs for more information on how to generate this key." + airbyte_secret: true + email: + type: "string" + title: "Email" + description: + "The email of the user, which has permissions to access\ + \ the Google Workspace Admin APIs." + source-smartengage: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "smartengage" + const: "smartengage" + enum: + - "smartengage" + order: 0 + type: "string" + source-smartengage-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + source-outbrain-amplify: + title: "Outbrain Amplify Spec" + type: "object" + required: + - "credentials" + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authentication Method" + description: + "Credentials for making authenticated requests requires either\ + \ username/password or access_token." + default: {} + order: 0 + type: "object" + oneOf: + - title: "Access token" + type: "object" + properties: + type: + title: "Access token is required for authentication requests." 
+ const: "access_token" + type: "string" + enum: + - "access_token" + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "type" + - "access_token" + - title: "Username Password" + type: "object" + properties: + type: + title: + "Both username and password is required for authentication\ + \ request." + const: "username_password" + type: "string" + enum: + - "username_password" + username: + type: "string" + description: "Add Username for authentication." + password: + type: "string" + description: "Add Password for authentication." + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "type" + - "username" + - "password" + report_granularity: + title: "Granularity for periodic reports." + description: + "The granularity used for periodic data in reports. See the docs." + enum: + - "daily" + - "weekly" + - "monthly" + order: 1 + type: "string" + geo_location_breakdown: + title: "Granularity for geo-location region." + description: "The granularity used for geo location data in reports." + enum: + - "country" + - "region" + - "subregion" + order: 2 + type: "string" + start_date: + type: "string" + order: 3 + description: + "Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + order: 4 + description: "Date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + sourceType: + title: "outbrain-amplify" + const: "outbrain-amplify" + enum: + - "outbrain-amplify" + order: 0 + type: "string" + source-outbrain-amplify-update: + title: "Outbrain Amplify Spec" + type: "object" + required: + - "credentials" + - "start_date" + properties: + credentials: + title: "Authentication Method" + description: + "Credentials for making authenticated requests requires either\ + \ username/password or access_token." 
+ default: {} + order: 0 + type: "object" + oneOf: + - title: "Access token" + type: "object" + properties: + type: + title: "Access token is required for authentication requests." + const: "access_token" + type: "string" + enum: + - "access_token" + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + required: + - "type" + - "access_token" + - title: "Username Password" + type: "object" + properties: + type: + title: + "Both username and password is required for authentication\ + \ request." + const: "username_password" + type: "string" + enum: + - "username_password" + username: + type: "string" + description: "Add Username for authentication." + password: + type: "string" + description: "Add Password for authentication." + airbyte_secret: true + required: + - "type" + - "username" + - "password" + report_granularity: + title: "Granularity for periodic reports." + description: + "The granularity used for periodic data in reports. See the docs." + enum: + - "daily" + - "weekly" + - "monthly" + order: 1 + type: "string" + geo_location_breakdown: + title: "Granularity for geo-location region." + description: "The granularity used for geo location data in reports." + enum: + - "country" + - "region" + - "subregion" + order: 2 + type: "string" + start_date: + type: "string" + order: 3 + description: + "Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + order: 4 + description: "Date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + source-k6-cloud: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + title: "Api Token" + description: + "Your API Token. See here. The key is case sensitive." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "k6-cloud" + const: "k6-cloud" + enum: + - "k6-cloud" + order: 0 + type: "string" + source-k6-cloud-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + title: "Api Token" + description: + "Your API Token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + source-postgres: + title: "Postgres Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "sourceType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + group: "db" + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + group: "db" + database: + title: "Database Name" + description: "Name of the database." + type: "string" + order: 2 + group: "db" + schemas: + title: "Schemas" + description: + "The list of schemas (case sensitive) to sync from. Defaults\ + \ to public." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "public" + order: 3 + group: "db" + username: + title: "Username" + description: "Username to access the database." + type: "string" + order: 4 + group: "auth" + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + group: "auth" + always_show: true + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." 
+ title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 6 + group: "advanced" + pattern_descriptor: "key1=value1&key2=value2" + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n Read more in the docs." + type: "object" + order: 8 + group: "security" + oneOf: + - title: "disable" + additionalProperties: true + description: + "Disables encryption of communication between Airbyte and\ + \ source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + order: 0 + enum: + - "disable" + - title: "allow" + additionalProperties: true + description: "Enables encryption only when required by the source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + order: 0 + enum: + - "allow" + - title: "prefer" + additionalProperties: true + description: + "Allows unencrypted connection only if the source database\ + \ does not support encryption." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + order: 0 + enum: + - "prefer" + - title: "require" + additionalProperties: true + description: + "Always require encryption. If the source database server\ + \ does not support encryption, connection will fail." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + order: 0 + enum: + - "require" + - title: "verify-ca" + additionalProperties: true + description: + "Always require encryption and verifies that the source database\ + \ server has a valid SSL certificate." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + order: 0 + enum: + - "verify-ca" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: true + description: + "This is the most secure mode. Always require encryption\ + \ and verifies the identity of the source database server." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + order: 0 + enum: + - "verify-full" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 9 + group: "advanced" + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Write-Ahead Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." + required: + - "method" + - "replication_slot" + - "publication" + additionalProperties: true + properties: + method: + type: "string" + const: "CDC" + order: 1 + enum: + - "CDC" + plugin: + type: "string" + title: "Plugin" + description: + "A logical decoding plugin installed on the PostgreSQL\ + \ server." + enum: + - "pgoutput" + default: "pgoutput" + order: 2 + replication_slot: + type: "string" + title: "Replication Slot" + description: + "A plugin logical replication slot. 
Read about replication slots." + order: 3 + publication: + type: "string" + title: "Publication" + description: + "A Postgres publication used for consuming changes. Read\ + \ about publications and replication identities." + order: 4 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 1200 seconds. Valid range: 120 seconds to 2400 seconds. Read about\ + \ initial waiting time." + default: 1200 + order: 5 + min: 120 + max: 2400 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 6 + min: 1000 + max: 10000 + lsn_commit_behaviour: + type: "string" + title: "LSN commit behaviour" + description: + "Determines when Airbyte should flush the LSN of processed\ + \ WAL logs in the source database. `After loading Data in the destination`\ + \ is default. If `While reading Data` is selected, in case of a\ + \ downstream failure (while loading data into the destination),\ + \ next sync would result in a full sync." + enum: + - "While reading Data" + - "After loading Data in the destination" + default: "After loading Data in the destination" + order: 7 + heartbeat_action_query: + type: "string" + title: "Debezium heartbeat query (Advanced)" + description: + "Specifies a query that the connector executes on the\ + \ source database when the connector sends a heartbeat message.\ + \ Please see the setup guide for how and when to configure this setting." 
+ default: "" + order: 8 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 9 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 10 + - title: "Detect Changes with Xmin System Column" + description: + "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. Suitable for databases that have low transaction\ + \ pressure." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + order: 8 + enum: + - "Standard" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + group: "security" + sourceType: + title: "postgres" + const: "postgres" + enum: + - "postgres" + order: 0 + type: "string" + groups: + - id: "db" + - id: "auth" + - id: "security" + title: "Security" + - id: "advanced" + title: "Advanced" + source-postgres-update: + title: "Postgres Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + group: "db" + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + group: "db" + database: + title: "Database Name" + description: "Name of the database." + type: "string" + order: 2 + group: "db" + schemas: + title: "Schemas" + description: + "The list of schemas (case sensitive) to sync from. Defaults\ + \ to public." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "public" + order: 3 + group: "db" + username: + title: "Username" + description: "Username to access the database." + type: "string" + order: 4 + group: "auth" + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 5 + group: "auth" + always_show: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 6 + group: "advanced" + pattern_descriptor: "key1=value1&key2=value2" + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n Read more in the docs." + type: "object" + order: 8 + group: "security" + oneOf: + - title: "disable" + additionalProperties: true + description: + "Disables encryption of communication between Airbyte and\ + \ source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + order: 0 + enum: + - "disable" + - title: "allow" + additionalProperties: true + description: "Enables encryption only when required by the source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + order: 0 + enum: + - "allow" + - title: "prefer" + additionalProperties: true + description: + "Allows unencrypted connection only if the source database\ + \ does not support encryption." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + order: 0 + enum: + - "prefer" + - title: "require" + additionalProperties: true + description: + "Always require encryption. If the source database server\ + \ does not support encryption, connection will fail." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + order: 0 + enum: + - "require" + - title: "verify-ca" + additionalProperties: true + description: + "Always require encryption and verifies that the source database\ + \ server has a valid SSL certificate." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + order: 0 + enum: + - "verify-ca" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: true + description: + "This is the most secure mode. Always require encryption\ + \ and verifies the identity of the source database server." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + order: 0 + enum: + - "verify-full" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." 
+ airbyte_secret: true + order: 4 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 9 + group: "advanced" + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Write-Ahead Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." + required: + - "method" + - "replication_slot" + - "publication" + additionalProperties: true + properties: + method: + type: "string" + const: "CDC" + order: 1 + enum: + - "CDC" + plugin: + type: "string" + title: "Plugin" + description: + "A logical decoding plugin installed on the PostgreSQL\ + \ server." + enum: + - "pgoutput" + default: "pgoutput" + order: 2 + replication_slot: + type: "string" + title: "Replication Slot" + description: + "A plugin logical replication slot. Read about replication slots." + order: 3 + publication: + type: "string" + title: "Publication" + description: + "A Postgres publication used for consuming changes. Read\ + \ about publications and replication identities." + order: 4 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 1200 seconds. Valid range: 120 seconds to 2400 seconds. Read about\ + \ initial waiting time." + default: 1200 + order: 5 + min: 120 + max: 2400 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." 
+ default: 10000 + order: 6 + min: 1000 + max: 10000 + lsn_commit_behaviour: + type: "string" + title: "LSN commit behaviour" + description: + "Determines when Airbyte should flush the LSN of processed\ + \ WAL logs in the source database. `After loading Data in the destination`\ + \ is default. If `While reading Data` is selected, in case of a\ + \ downstream failure (while loading data into the destination),\ + \ next sync would result in a full sync." + enum: + - "While reading Data" + - "After loading Data in the destination" + default: "After loading Data in the destination" + order: 7 + heartbeat_action_query: + type: "string" + title: "Debezium heartbeat query (Advanced)" + description: + "Specifies a query that the connector executes on the\ + \ source database when the connector sends a heartbeat message.\ + \ Please see the setup guide for how and when to configure this setting." + default: "" + order: 8 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 9 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 10 + - title: "Detect Changes with Xmin System Column" + description: + "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. 
Suitable for databases that have low transaction\ + \ pressure." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + order: 8 + enum: + - "Standard" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + group: "security" + groups: + - id: "db" + - id: "auth" + - id: "security" + title: "Security" + - id: "advanced" + title: "Advanced" + source-buildkite: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "buildkite" + const: "buildkite" + enum: + - "buildkite" + order: 0 + type: "string" 
+ source-buildkite-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-fauna: + title: "Fauna Spec" + type: "object" + required: + - "domain" + - "port" + - "scheme" + - "secret" + - "sourceType" + properties: + domain: + order: 0 + type: "string" + title: "Domain" + description: + "Domain of Fauna to query. Defaults db.fauna.com. See the\ + \ docs." + default: "db.fauna.com" + port: + order: 1 + type: "integer" + title: "Port" + description: "Endpoint port." + default: 443 + scheme: + order: 2 + type: "string" + title: "Scheme" + description: "URL scheme." + default: "https" + secret: + order: 3 + type: "string" + title: "Fauna Secret" + description: "Fauna secret, used when authenticating with the database." + airbyte_secret: true + x-speakeasy-param-sensitive: true + collection: + order: 5 + type: "object" + title: "Collection" + description: "Settings for the Fauna Collection." + required: + - "page_size" + - "deletions" + properties: + page_size: + order: 4 + type: "integer" + title: "Page Size" + default: 64 + description: + "The page size used when reading documents from the database.\ + \ The larger the page size, the faster the connector processes documents.\ + \ However, if a page is too large, the connector may fail.
    \n\ + Choose your page size based on how large the documents are.
    \n\ + See the docs." + deletions: + order: 5 + type: "object" + title: "Deletion Mode" + description: + "This only applies to incremental syncs.
    \n\ + Enabling deletion mode informs your destination of deleted documents.
    \n\ + Disabled - Leave this feature disabled, and ignore deleted documents.
    \n\ + Enabled - Enables this feature. When a document is deleted, the connector\ + \ exports a record with a \"deleted at\" column containing the time\ + \ that the document was deleted." + oneOf: + - title: "Disabled" + type: "object" + order: 0 + required: + - "deletion_mode" + properties: + deletion_mode: + type: "string" + const: "ignore" + enum: + - "ignore" + - title: "Enabled" + type: "object" + order: 1 + required: + - "deletion_mode" + - "column" + properties: + deletion_mode: + type: "string" + const: "deleted_field" + enum: + - "deleted_field" + column: + type: "string" + title: "Deleted At Column" + description: 'Name of the "deleted at" column.' + default: "deleted_at" + sourceType: + title: "fauna" + const: "fauna" + enum: + - "fauna" + order: 0 + type: "string" + source-fauna-update: + title: "Fauna Spec" + type: "object" + required: + - "domain" + - "port" + - "scheme" + - "secret" + properties: + domain: + order: 0 + type: "string" + title: "Domain" + description: + "Domain of Fauna to query. Defaults db.fauna.com. See the\ + \ docs." + default: "db.fauna.com" + port: + order: 1 + type: "integer" + title: "Port" + description: "Endpoint port." + default: 443 + scheme: + order: 2 + type: "string" + title: "Scheme" + description: "URL scheme." + default: "https" + secret: + order: 3 + type: "string" + title: "Fauna Secret" + description: "Fauna secret, used when authenticating with the database." + airbyte_secret: true + collection: + order: 5 + type: "object" + title: "Collection" + description: "Settings for the Fauna Collection." + required: + - "page_size" + - "deletions" + properties: + page_size: + order: 4 + type: "integer" + title: "Page Size" + default: 64 + description: + "The page size used when reading documents from the database.\ + \ The larger the page size, the faster the connector processes documents.\ + \ However, if a page is too large, the connector may fail.
    \n\ + Choose your page size based on how large the documents are.
    \n\ + See the docs." + deletions: + order: 5 + type: "object" + title: "Deletion Mode" + description: + "This only applies to incremental syncs.
    \n\ + Enabling deletion mode informs your destination of deleted documents.
    \n\ + Disabled - Leave this feature disabled, and ignore deleted documents.
    \n\ + Enabled - Enables this feature. When a document is deleted, the connector\ + \ exports a record with a \"deleted at\" column containing the time\ + \ that the document was deleted." + oneOf: + - title: "Disabled" + type: "object" + order: 0 + required: + - "deletion_mode" + properties: + deletion_mode: + type: "string" + const: "ignore" + enum: + - "ignore" + - title: "Enabled" + type: "object" + order: 1 + required: + - "deletion_mode" + - "column" + properties: + deletion_mode: + type: "string" + const: "deleted_field" + enum: + - "deleted_field" + column: + type: "string" + title: "Deleted At Column" + description: 'Name of the "deleted at" column.' + default: "deleted_at" + source-twilio: + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + - "sourceType" + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + x-speakeasy-param-sensitive: true + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + x-speakeasy-param-sensitive: true + start_date: + title: "Replication Start Date" + description: + "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + format: "date-time" + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. 
(in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + sourceType: + title: "twilio" + const: "twilio" + enum: + - "twilio" + order: 0 + type: "string" + source-twilio-update: + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + start_date: + title: "Replication Start Date" + description: + "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + format: "date-time" + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. (in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + source-sendgrid: + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 0 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + description: + "Sendgrid API Key, use admin to generate this key." 
+ x-speakeasy-param-sensitive: true + sourceType: + title: "sendgrid" + const: "sendgrid" + enum: + - "sendgrid" + order: 0 + type: "string" + source-sendgrid-update: + type: "object" + required: + - "start_date" + - "api_key" + properties: + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 0 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + description: + "Sendgrid API Key, use admin to generate this key." + source-gnews: + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + order: 1 + title: "Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. 
For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + language: + type: "string" + title: "Language" + description: + "This parameter allows you to specify the language of the news\ + \ articles returned by the API. You have to set as value the 2 letters\ + \ code of the language you want to filter." + order: 2 + enum: + - "ar" + - "zh" + - "nl" + - "en" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "it" + - "ja" + - "ml" + - "mr" + - "no" + - "pt" + - "ro" + - "ru" + - "es" + - "sv" + - "ta" + - "te" + - "uk" + country: + type: "string" + title: "Country" + description: + "This parameter allows you to specify the country where the\ + \ news articles returned by the API were published, the contents of the\ + \ articles are not necessarily related to the specified country. You have\ + \ to set as value the 2 letters code of the country you want to filter." 
+ order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: + "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: + "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 7 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: + "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 8 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 9 + title: "Top Headlines Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." + order: 10 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + sourceType: + title: "gnews" + const: "gnews" + enum: + - "gnews" + order: 0 + type: "string" + source-gnews-update: + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + query: + type: "string" + order: 1 + title: "Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. 
For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + language: + type: "string" + title: "Language" + description: + "This parameter allows you to specify the language of the news\ + \ articles returned by the API. You have to set as value the 2 letters\ + \ code of the language you want to filter." + order: 2 + enum: + - "ar" + - "zh" + - "nl" + - "en" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "it" + - "ja" + - "ml" + - "mr" + - "no" + - "pt" + - "ro" + - "ru" + - "es" + - "sv" + - "ta" + - "te" + - "uk" + country: + type: "string" + title: "Country" + description: + "This parameter allows you to specify the country where the\ + \ news articles returned by the API were published, the contents of the\ + \ articles are not necessarily related to the specified country. You have\ + \ to set as value the 2 letters code of the country you want to filter." 
+ order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: + "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: + "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 7 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: + "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 8 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 9 + title: "Top Headlines Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." + order: 10 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + source-google-ads: + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + type: "object" + description: "" + title: "Google Credentials" + order: 0 + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + order: 3 + description: + "The token used to obtain a new Access Token. 
For detailed\ + \ instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + order: 4 + description: + "The Access Token for making authenticated requests. For\ + \ detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + customer_id: + title: "Customer ID(s)" + type: "string" + description: + "Comma-separated list of (client) customer IDs. Each customer\ + \ ID must be specified as a 10-digit number without dashes. For detailed\ + \ instructions on finding this value, refer to our documentation." + pattern: "^[0-9]{10}(,[0-9]{10})*$" + pattern_descriptor: + "The customer ID must be 10 digits. Separate multiple\ + \ customer IDs using commas." + examples: + - "6783948572,5839201945" + order: 1 + customer_status_filter: + title: "Customer Statuses Filter" + description: + "A list of customer statuses to filter on. For detailed info\ + \ about what each status mean refer to Google Ads documentation." + default: [] + order: 2 + type: "array" + items: + title: "CustomerStatus" + description: "An enumeration." + enum: + - "UNKNOWN" + - "ENABLED" + - "CANCELED" + - "SUSPENDED" + - "CLOSED" + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. (Default value of two years ago is used if not\ + \ set)" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-25" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
(Default value of today is used if not set)" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-30" + order: 4 + format: "date" + custom_queries_array: + type: "array" + title: "Custom GAQL Queries" + description: "" + order: 5 + items: + type: "object" + required: + - "query" + - "table_name" + properties: + query: + type: "string" + multiline: true + title: "Custom Query" + description: + "A custom defined GAQL query for building the report.\ + \ Avoid including the segments.date field; wherever possible, Airbyte\ + \ will automatically include it for incremental syncs. For more\ + \ information, refer to Google's documentation." + examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination Table Name" + description: + "The table name in your destination database for the\ + \ chosen query." + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: + "A conversion window is the number of days after an ad interaction\ + \ (such as an ad click or video view) during which a conversion, such\ + \ as a purchase, is recorded in Google Ads. For more information, see\ + \ Google's documentation." 
+ minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + order: 6 + sourceType: + title: "google-ads" + const: "google-ads" + enum: + - "google-ads" + order: 0 + type: "string" + source-google-ads-update: + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + type: "object" + description: "" + title: "Google Credentials" + order: 0 + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + order: 3 + description: + "The token used to obtain a new Access Token. For detailed\ + \ instructions on finding this value, refer to our documentation." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + order: 4 + description: + "The Access Token for making authenticated requests. For\ + \ detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + customer_id: + title: "Customer ID(s)" + type: "string" + description: + "Comma-separated list of (client) customer IDs. Each customer\ + \ ID must be specified as a 10-digit number without dashes. For detailed\ + \ instructions on finding this value, refer to our documentation." 
+ pattern: "^[0-9]{10}(,[0-9]{10})*$" + pattern_descriptor: + "The customer ID must be 10 digits. Separate multiple\ + \ customer IDs using commas." + examples: + - "6783948572,5839201945" + order: 1 + customer_status_filter: + title: "Customer Statuses Filter" + description: + "A list of customer statuses to filter on. For detailed info\ + \ about what each status mean refer to Google Ads documentation." + default: [] + order: 2 + type: "array" + items: + title: "CustomerStatus" + description: "An enumeration." + enum: + - "UNKNOWN" + - "ENABLED" + - "CANCELED" + - "SUSPENDED" + - "CLOSED" + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. (Default value of two years ago is used if not\ + \ set)" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-25" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. (Default value of today is used if not set)" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-30" + order: 4 + format: "date" + custom_queries_array: + type: "array" + title: "Custom GAQL Queries" + description: "" + order: 5 + items: + type: "object" + required: + - "query" + - "table_name" + properties: + query: + type: "string" + multiline: true + title: "Custom Query" + description: + "A custom defined GAQL query for building the report.\ + \ Avoid including the segments.date field; wherever possible, Airbyte\ + \ will automatically include it for incremental syncs. For more\ + \ information, refer to Google's documentation." 
+ examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination Table Name" + description: + "The table name in your destination database for the\ + \ chosen query." + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: + "A conversion window is the number of days after an ad interaction\ + \ (such as an ad click or video view) during which a conversion, such\ + \ as a purchase, is recorded in Google Ads. For more information, see\ + \ Google's documentation." + minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + order: 6 + source-google-search-console: + title: "Google Search Console Spec" + type: "object" + required: + - "site_urls" + - "authorization" + - "sourceType" + properties: + site_urls: + type: "array" + items: + type: "string" + title: "Website URL Property" + description: + "The URLs of the website property attached to your GSC account.\ + \ Learn more about properties here." + examples: + - "https://example1.com/" + - "sc-domain:example2.com" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true + order: 1 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." 
+ examples: + - "2021-12-12" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + order: 2 + format: "date" + authorization: + type: "object" + title: "Authentication Type" + description: "" + order: 3 + oneOf: + - title: "OAuth" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "Access token for making authenticated requests. Read\ + \ more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "The token for obtaining a new access token. Read more\ + \ here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + title: "Admin Email" + type: "string" + description: + "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: + "A list of available dimensions. Please note, that for\ + \ technical reasons `date` is the default dimension which will be\ + \ included in your query whether you specify it or not. Primary\ + \ key will consist of your custom dimensions and the default dimension\ + \ along with `site_url` and `search_type`." + type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + default: + - "date" + minItems: 0 + required: + - "name" + - "dimensions" + data_state: + type: "string" + title: "Data Freshness" + enum: + - "final" + - "all" + description: + "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." 
+ examples: + - "final" + - "all" + default: "final" + order: 6 + sourceType: + title: "google-search-console" + const: "google-search-console" + enum: + - "google-search-console" + order: 0 + type: "string" + source-google-search-console-update: + title: "Google Search Console Spec" + type: "object" + required: + - "site_urls" + - "authorization" + properties: + site_urls: + type: "array" + items: + type: "string" + title: "Website URL Property" + description: + "The URLs of the website property attached to your GSC account.\ + \ Learn more about properties here." + examples: + - "https://example1.com/" + - "sc-domain:example2.com" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true + order: 1 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." + examples: + - "2021-12-12" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + order: 2 + format: "date" + authorization: + type: "object" + title: "Authentication Type" + description: "" + order: 3 + oneOf: + - title: "OAuth" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." 
+ airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "Access token for making authenticated requests. Read\ + \ more here." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "The token for obtaining a new access token. Read more\ + \ here." + airbyte_secret: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + airbyte_secret: true + email: + title: "Admin Email" + type: "string" + description: + "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: + "A list of available dimensions. Please note, that for\ + \ technical reasons `date` is the default dimension which will be\ + \ included in your query whether you specify it or not. Primary\ + \ key will consist of your custom dimensions and the default dimension\ + \ along with `site_url` and `search_type`." 
+ type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + default: + - "date" + minItems: 0 + required: + - "name" + - "dimensions" + data_state: + type: "string" + title: "Data Freshness" + enum: + - "final" + - "all" + description: + "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." + examples: + - "final" + - "all" + default: "final" + order: 6 + source-kyve: + title: "KYVE Spec" + type: "object" + required: + - "pool_ids" + - "start_ids" + - "url_base" + - "sourceType" + properties: + pool_ids: + type: "string" + title: "Pool-IDs" + description: + "The IDs of the KYVE storage pool you want to archive. (Comma\ + \ separated)" + order: 0 + examples: + - "0" + - "0,1" + start_ids: + type: "string" + title: "Bundle-Start-IDs" + description: + "The start-id defines, from which bundle id the pipeline should\ + \ start to extract the data. (Comma separated)" + order: 1 + examples: + - "0" + - "0,0" + url_base: + type: "string" + title: "KYVE-API URL Base" + description: "URL to the KYVE Chain API." + default: "https://api.kyve.network" + order: 2 + examples: + - "https://api.kaon.kyve.network/" + - "https://api.korellia.kyve.network/" + sourceType: + title: "kyve" + const: "kyve" + enum: + - "kyve" + order: 0 + type: "string" + source-kyve-update: + title: "KYVE Spec" + type: "object" + required: + - "pool_ids" + - "start_ids" + - "url_base" + properties: + pool_ids: + type: "string" + title: "Pool-IDs" + description: + "The IDs of the KYVE storage pool you want to archive. 
(Comma\ + \ separated)" + order: 0 + examples: + - "0" + - "0,1" + start_ids: + type: "string" + title: "Bundle-Start-IDs" + description: + "The start-id defines, from which bundle id the pipeline should\ + \ start to extract the data. (Comma separated)" + order: 1 + examples: + - "0" + - "0,0" + url_base: + type: "string" + title: "KYVE-API URL Base" + description: "URL to the KYVE Chain API." + default: "https://api.kyve.network" + order: 2 + examples: + - "https://api.kaon.kyve.network/" + - "https://api.korellia.kyve.network/" + source-strava: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "athlete_id" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + description: "The Client ID of your Strava developer application." + title: "Client ID" + pattern: "^[0-9_\\-]+$" + examples: + - "12345" + order: 0 + client_secret: + type: "string" + description: "The Client Secret of your Strava developer application." + title: "Client Secret" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + description: "The Refresh Token with the activity: read_all permissions." + title: "Refresh Token" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + athlete_id: + type: "integer" + description: "The Athlete ID of your Strava developer application." + title: "Athlete ID" + pattern: "^[0-9_\\-]+$" + examples: + - "17831421" + order: 3 + start_date: + type: "string" + description: "UTC date and time. Any data before this date will not be replicated." 
+ title: "Start Date" + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 4 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 5 + sourceType: + title: "strava" + const: "strava" + enum: + - "strava" + order: 0 + type: "string" + source-strava-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "athlete_id" + - "start_date" + properties: + client_id: + type: "string" + description: "The Client ID of your Strava developer application." + title: "Client ID" + pattern: "^[0-9_\\-]+$" + examples: + - "12345" + order: 0 + client_secret: + type: "string" + description: "The Client Secret of your Strava developer application." + title: "Client Secret" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 1 + refresh_token: + type: "string" + description: "The Refresh Token with the activity: read_all permissions." + title: "Refresh Token" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 2 + athlete_id: + type: "integer" + description: "The Athlete ID of your Strava developer application." + title: "Athlete ID" + pattern: "^[0-9_\\-]+$" + examples: + - "17831421" + order: 3 + start_date: + type: "string" + description: "UTC date and time. Any data before this date will not be replicated." + title: "Start Date" + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 4 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 5 + source-smaily: + type: "object" + required: + - "api_password" + - "api_subdomain" + - "api_username" + - "sourceType" + properties: + api_password: + type: "string" + title: "API User Password" + description: "API user password. 
See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + order: 1 + api_username: + type: "string" + title: "API User Username" + description: "API user username. See https://smaily.com/help/api/general/create-api-user/" + order: 2 + sourceType: + title: "smaily" + const: "smaily" + enum: + - "smaily" + order: 0 + type: "string" + source-smaily-update: + type: "object" + required: + - "api_password" + - "api_subdomain" + - "api_username" + properties: + api_password: + type: "string" + title: "API User Password" + description: "API user password. See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + order: 0 + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + order: 1 + api_username: + type: "string" + title: "API User Username" + description: "API user username. 
See https://smaily.com/help/api/general/create-api-user/" + order: 2 + source-height: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + search_query: + type: "string" + description: "Search query to be used with search stream" + title: "search_query" + default: "task" + order: 2 + sourceType: + title: "height" + const: "height" + enum: + - "height" + order: 0 + type: "string" + source-height-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + search_query: + type: "string" + description: "Search query to be used with search stream" + title: "search_query" + default: "task" + order: 2 + source-piwik: + type: "object" + required: + - "client_id" + - "client_secret" + - "organization_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: "The organization id appearing at URL of your piwik website" + order: 2 + title: "Organization ID" + sourceType: + title: "piwik" + const: "piwik" + enum: + - "piwik" + order: 0 + type: "string" + source-piwik-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "organization_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + 
airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + organization_id: + type: "string" + description: "The organization id appearing at URL of your piwik website" + order: 2 + title: "Organization ID" + source-polygon-stock-api: + type: "object" + required: + - "apiKey" + - "end_date" + - "multiplier" + - "start_date" + - "stocksTicker" + - "timespan" + - "sourceType" + properties: + sort: + type: "string" + order: 5 + title: "Sort" + examples: + - "asc" + - "desc" + description: + "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + limit: + type: "integer" + order: 3 + title: "Limit" + examples: + - 100 + - 120 + description: "The target date for the aggregate window." + apiKey: + type: "string" + order: 1 + title: "API Key" + description: "Your API ACCESS Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + adjusted: + type: "string" + order: 0 + title: "Adjusted" + examples: + - "true" + - "false" + description: + "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + end_date: + type: "string" + order: 2 + title: "End Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The target date for the aggregate window." + timespan: + type: "string" + order: 8 + title: "Timespan" + examples: + - "day" + description: "The size of the time window." + multiplier: + type: "integer" + order: 4 + title: "Multiplier" + examples: + - 1 + - 2 + description: "The size of the timespan multiplier." 
+ start_date: + type: "string" + order: 6 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The beginning date for the aggregate window." + stocksTicker: + type: "string" + order: 7 + title: "Stock Ticker" + examples: + - "IBM" + - "MSFT" + description: "The exchange symbol that this item is traded under." + sourceType: + title: "polygon-stock-api" + const: "polygon-stock-api" + enum: + - "polygon-stock-api" + order: 0 + type: "string" + source-polygon-stock-api-update: + type: "object" + required: + - "apiKey" + - "end_date" + - "multiplier" + - "start_date" + - "stocksTicker" + - "timespan" + properties: + sort: + type: "string" + order: 5 + title: "Sort" + examples: + - "asc" + - "desc" + description: + "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + limit: + type: "integer" + order: 3 + title: "Limit" + examples: + - 100 + - 120 + description: "The target date for the aggregate window." + apiKey: + type: "string" + order: 1 + title: "API Key" + description: "Your API ACCESS Key" + airbyte_secret: true + adjusted: + type: "string" + order: 0 + title: "Adjusted" + examples: + - "true" + - "false" + description: + "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + end_date: + type: "string" + order: 2 + title: "End Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The target date for the aggregate window." + timespan: + type: "string" + order: 8 + title: "Timespan" + examples: + - "day" + description: "The size of the time window." 
+ multiplier: + type: "integer" + order: 4 + title: "Multiplier" + examples: + - 1 + - 2 + description: "The size of the timespan multiplier." + start_date: + type: "string" + order: 6 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The beginning date for the aggregate window." + stocksTicker: + type: "string" + order: 7 + title: "Stock Ticker" + examples: + - "IBM" + - "MSFT" + description: "The exchange symbol that this item is traded under." + source-shopify: + title: "Shopify Source CDK Specifications" + type: "object" + required: + - "shop" + - "sourceType" + properties: + shop: + type: "string" + title: "Shopify Store" + description: + "The name of your Shopify store found in the URL. For example,\ + \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ + \ or 'NAME.myshopify.com'." + pattern: "^(?!https://)(?!https://).*" + examples: + - "my-store" + - "my-store.myshopify.com" + order: 1 + credentials: + title: "Shopify Authorization Method" + description: "The authorization method to use to retrieve data from Shopify" + type: "object" + order: 2 + oneOf: + - type: "object" + title: "OAuth2.0" + description: "OAuth2.0" + required: + - "auth_method" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "The Access Token for making authenticated requests." 
+ airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: + "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Replication Start Date" + description: + "The date you would like to replicate data from. Format: YYYY-MM-DD.\ + \ Any data before this date will not be replicated." + default: "2020-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 3 + bulk_window_in_days: + type: "integer" + title: "GraphQL BULK Date Range in Days" + description: "Defines what would be a date range per single BULK Job" + default: 30 + fetch_transactions_user_id: + type: "boolean" + title: "Add `user_id` to Transactions (slower)" + description: + "Defines which API type (REST/BULK) to use to fetch `Transactions`\ + \ data. If you are a `Shopify Plus` user, leave the default value to speed\ + \ up the fetch." + default: false + job_product_variants_include_pres_prices: + type: "boolean" + title: "Add `Presentment prices` to Product Variants" + description: + "If enabled, the `Product Variants` stream attempts to include\ + \ `Presentment prices` field (may affect the performance)." + default: true + job_termination_threshold: + type: "integer" + title: "BULK Job termination threshold" + description: + "The max time in seconds, after which the single BULK Job should\ + \ be `CANCELED` and retried. The bigger the value the longer the BULK\ + \ Job is allowed to run." 
+ default: 7200 + minimum: 3600 + maximum: 21600 + job_checkpoint_interval: + type: "integer" + title: "BULK Job checkpoint (rows collected)" + description: "The threshold, after which the single BULK Job should be checkpointed." + default: 100000 + minimum: 15000 + maximum: 200000 + sourceType: + title: "shopify" + const: "shopify" + enum: + - "shopify" + order: 0 + type: "string" + source-shopify-update: + title: "Shopify Source CDK Specifications" + type: "object" + required: + - "shop" + properties: + shop: + type: "string" + title: "Shopify Store" + description: + "The name of your Shopify store found in the URL. For example,\ + \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ + \ or 'NAME.myshopify.com'." + pattern: "^(?!https://)(?!https://).*" + examples: + - "my-store" + - "my-store.myshopify.com" + order: 1 + credentials: + title: "Shopify Authorization Method" + description: "The authorization method to use to retrieve data from Shopify" + type: "object" + order: 2 + oneOf: + - type: "object" + title: "OAuth2.0" + description: "OAuth2.0" + required: + - "auth_method" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "The Access Token for making authenticated requests." 
+ airbyte_secret: true + order: 3 + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: + "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Replication Start Date" + description: + "The date you would like to replicate data from. Format: YYYY-MM-DD.\ + \ Any data before this date will not be replicated." + default: "2020-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 3 + bulk_window_in_days: + type: "integer" + title: "GraphQL BULK Date Range in Days" + description: "Defines what would be a date range per single BULK Job" + default: 30 + fetch_transactions_user_id: + type: "boolean" + title: "Add `user_id` to Transactions (slower)" + description: + "Defines which API type (REST/BULK) to use to fetch `Transactions`\ + \ data. If you are a `Shopify Plus` user, leave the default value to speed\ + \ up the fetch." + default: false + job_product_variants_include_pres_prices: + type: "boolean" + title: "Add `Presentment prices` to Product Variants" + description: + "If enabled, the `Product Variants` stream attempts to include\ + \ `Presentment prices` field (may affect the performance)." + default: true + job_termination_threshold: + type: "integer" + title: "BULK Job termination threshold" + description: + "The max time in seconds, after which the single BULK Job should\ + \ be `CANCELED` and retried. The bigger the value the longer the BULK\ + \ Job is allowed to run." 
+ default: 7200 + minimum: 3600 + maximum: 21600 + job_checkpoint_interval: + type: "integer" + title: "BULK Job checkpoint (rows collected)" + description: "The threshold, after which the single BULK Job should be checkpointed." + default: 100000 + minimum: 15000 + maximum: 200000 + source-omnisend: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "omnisend" + const: "omnisend" + enum: + - "omnisend" + order: 0 + type: "string" + source-omnisend-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + source-mongodb-v2: + title: "MongoDb Source Spec" + type: "object" + required: + - "database_config" + - "sourceType" + properties: + database_config: + type: "object" + title: "Cluster Type" + description: "Configures the MongoDB cluster type." + order: 1 + group: "connection" + display_type: "radio" + oneOf: + - title: "MongoDB Atlas Replica Set" + description: "MongoDB Atlas-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + - "username" + - "password" + - "auth_source" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "ATLAS_REPLICA_SET" + order: 1 + enum: + - "ATLAS_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate." + examples: + - "mongodb+srv://cluster0.abcd1.mongodb.net/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." 
+ order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\ + \ for more details." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + - title: "Self-Managed Replica Set" + description: "MongoDB self-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "SELF_MANAGED_REPLICA_SET" + order: 1 + enum: + - "SELF_MANAGED_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\ + \ for more information." + examples: + - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/" + - "mongodb://example.host.com:27017/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." 
+ order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to 300 seconds.\ + \ Valid range: 120 seconds to 1200 seconds." + default: 300 + order: 8 + min: 120 + max: 1200 + group: "advanced" + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with memory\ + \ consumption and efficiency of the connector, please be careful." + default: 10000 + order: 9 + min: 1000 + max: 10000 + group: "advanced" + discover_sample_size: + type: "integer" + title: "Document discovery sample size (Advanced)" + description: + "The maximum number of documents to sample when attempting\ + \ to discover the unique fields for a collection." + default: 10000 + order: 10 + minimum: 10 + maximum: 100000 + group: "advanced" + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data in\ + \ case of an stale/invalid cursor value into the WAL. If 'Fail sync' is\ + \ chosen, a user will have to manually reset the connection before being\ + \ able to continue syncing data. 
If 'Re-sync data' is chosen, Airbyte\ + \ will automatically trigger a refresh but could lead to higher cloud\ + \ costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 11 + group: "advanced" + update_capture_mode: + type: "string" + title: "Capture mode (Advanced)" + description: + "Determines how Airbyte looks up the value of an updated document.\ + \ If 'Lookup' is chosen, the current value of the document will be read.\ + \ If 'Post Image' is chosen, then the version of the document immediately\ + \ after an update will be read. WARNING : Severe data loss will occur\ + \ if this option is chosen and the appropriate settings are not set on\ + \ your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images." + enum: + - "Lookup" + - "Post Image" + default: "Lookup" + order: 12 + group: "advanced" + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 13 + group: "advanced" + sourceType: + title: "mongodb-v2" + const: "mongodb-v2" + enum: + - "mongodb-v2" + order: 0 + type: "string" + groups: + - id: "connection" + - id: "advanced" + title: "Advanced" + source-mongodb-v2-update: + title: "MongoDb Source Spec" + type: "object" + required: + - "database_config" + properties: + database_config: + type: "object" + title: "Cluster Type" + description: "Configures the MongoDB cluster type." 
+ order: 1 + group: "connection" + display_type: "radio" + oneOf: + - title: "MongoDB Atlas Replica Set" + description: "MongoDB Atlas-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + - "username" + - "password" + - "auth_source" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "ATLAS_REPLICA_SET" + order: 1 + enum: + - "ATLAS_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate." + examples: + - "mongodb+srv://cluster0.abcd1.mongodb.net/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\ + \ for more details." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." 
+ default: true + type: "boolean" + always_show: true + order: 7 + - title: "Self-Managed Replica Set" + description: "MongoDB self-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "SELF_MANAGED_REPLICA_SET" + order: 1 + enum: + - "SELF_MANAGED_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\ + \ for more information." + examples: + - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/" + - "mongodb://example.host.com:27017/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to 300 seconds.\ + \ Valid range: 120 seconds to 1200 seconds." + default: 300 + order: 8 + min: 120 + max: 1200 + group: "advanced" + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with memory\ + \ consumption and efficiency of the connector, please be careful." + default: 10000 + order: 9 + min: 1000 + max: 10000 + group: "advanced" + discover_sample_size: + type: "integer" + title: "Document discovery sample size (Advanced)" + description: + "The maximum number of documents to sample when attempting\ + \ to discover the unique fields for a collection." + default: 10000 + order: 10 + minimum: 10 + maximum: 100000 + group: "advanced" + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data in\ + \ case of an stale/invalid cursor value into the WAL. If 'Fail sync' is\ + \ chosen, a user will have to manually reset the connection before being\ + \ able to continue syncing data. If 'Re-sync data' is chosen, Airbyte\ + \ will automatically trigger a refresh but could lead to higher cloud\ + \ costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 11 + group: "advanced" + update_capture_mode: + type: "string" + title: "Capture mode (Advanced)" + description: + "Determines how Airbyte looks up the value of an updated document.\ + \ If 'Lookup' is chosen, the current value of the document will be read.\ + \ If 'Post Image' is chosen, then the version of the document immediately\ + \ after an update will be read. WARNING : Severe data loss will occur\ + \ if this option is chosen and the appropriate settings are not set on\ + \ your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images." 
+ enum: + - "Lookup" + - "Post Image" + default: "Lookup" + order: 12 + group: "advanced" + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 13 + group: "advanced" + groups: + - id: "connection" + - id: "advanced" + title: "Advanced" + source-retently: + title: "Retently Api Spec" + type: "object" + properties: + credentials: + title: "Authentication Mechanism" + description: "Choose how to authenticate to Retently" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Retently (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Retently Refresh Token which can be used to fetch new\ + \ Bearer Tokens when the current one expires." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with API Token" + required: + - "api_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_key: + title: "API Token" + description: + "Retently API Token. See the docs for more information on how to obtain this key." 
+ type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "retently" + const: "retently" + enum: + - "retently" + order: 0 + type: "string" + source-retently-update: + title: "Retently Api Spec" + type: "object" + properties: + credentials: + title: "Authentication Mechanism" + description: "Choose how to authenticate to Retently" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Retently (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Retently Refresh Token which can be used to fetch new\ + \ Bearer Tokens when the current one expires." + airbyte_secret: true + - type: "object" + title: "Authenticate with API Token" + required: + - "api_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_key: + title: "API Token" + description: + "Retently API Token. See the docs for more information on how to obtain this key." 
+ type: "string" + airbyte_secret: true + source-coda: + type: "object" + required: + - "auth_token" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication token" + description: "Bearer token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "coda" + const: "coda" + enum: + - "coda" + order: 0 + type: "string" + source-coda-update: + type: "object" + required: + - "auth_token" + properties: + auth_token: + type: "string" + title: "Authentication token" + description: "Bearer token" + airbyte_secret: true + order: 0 + source-fleetio: + type: "object" + required: + - "api_key" + - "account_token" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_token: + type: "string" + order: 1 + title: "account_token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "fleetio" + const: "fleetio" + enum: + - "fleetio" + order: 0 + type: "string" + source-fleetio-update: + type: "object" + required: + - "api_key" + - "account_token" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + airbyte_secret: true + account_token: + type: "string" + order: 1 + title: "account_token" + airbyte_secret: true + source-pendo: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "pendo" + const: "pendo" + enum: + - "pendo" + order: 0 + type: "string" + source-pendo-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + source-jotform: + type: "object" + required: + - "api_key" + - "api_endpoint" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + 
airbyte_secret: true + x-speakeasy-param-sensitive: true + end_date: + type: "string" + order: 3 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_endpoint: + type: "object" + oneOf: + - type: "object" + title: "Basic" + required: + - "url_prefix" + properties: + url_prefix: + type: "string" + description: + "You can access our API through the following URLs -\ + \ Standard API Usage (Use the default API URL - https://api.jotform.com),\ + \ For EU (Use the EU API URL - https://eu-api.jotform.com), For\ + \ HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)" + enum: + - "Standard" + - "EU" + - "HIPAA" + title: "Base URL Prefix" + default: "Standard" + api_endpoint: + type: "string" + const: "basic" + order: 0 + enum: + - "basic" + - type: "object" + title: "Enterprise" + required: + - "enterprise_url" + properties: + api_endpoint: + type: "string" + const: "enterprise" + order: 0 + enum: + - "enterprise" + enterprise_url: + type: "string" + description: + "Upgrade to Enterprise to make your API url your-domain.com/API\ + \ or subdomain.jotform.com/API instead of api.jotform.com" + title: "Enterprise URL" + order: 1 + title: "API Endpoint" + sourceType: + title: "jotform" + const: "jotform" + enum: + - "jotform" + order: 0 + type: "string" + source-jotform-update: + type: "object" + required: + - "api_key" + - "api_endpoint" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + end_date: + type: "string" + order: 3 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: 
"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_endpoint: + type: "object" + oneOf: + - type: "object" + title: "Basic" + required: + - "url_prefix" + properties: + url_prefix: + type: "string" + description: + "You can access our API through the following URLs -\ + \ Standard API Usage (Use the default API URL - https://api.jotform.com),\ + \ For EU (Use the EU API URL - https://eu-api.jotform.com), For\ + \ HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)" + enum: + - "Standard" + - "EU" + - "HIPAA" + title: "Base URL Prefix" + default: "Standard" + api_endpoint: + type: "string" + const: "basic" + order: 0 + enum: + - "basic" + - type: "object" + title: "Enterprise" + required: + - "enterprise_url" + properties: + api_endpoint: + type: "string" + const: "enterprise" + order: 0 + enum: + - "enterprise" + enterprise_url: + type: "string" + description: + "Upgrade to Enterprise to make your API url your-domain.com/API\ + \ or subdomain.jotform.com/API instead of api.jotform.com" + title: "Enterprise URL" + order: 1 + title: "API Endpoint" + source-instagram: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated. If left blank, the start date will be\ + \ set to 2 years before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: + "The value of the access token generated with instagram_basic,\ + \ instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram\ + \ Public Content Access permissions. 
See the docs for more information" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + sourceType: + title: "instagram" + const: "instagram" + enum: + - "instagram" + order: 0 + type: "string" + required: + - "access_token" + - "sourceType" + source-instagram-update: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated. If left blank, the start date will be\ + \ set to 2 years before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: + "The value of the access token generated with instagram_basic,\ + \ instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram\ + \ Public Content Access permissions. 
See the docs for more information" + airbyte_secret: true + type: "string" + required: + - "access_token" + source-dbt: + type: "object" + required: + - "api_key_2" + - "account_id" + - "sourceType" + properties: + api_key_2: + type: "string" + order: 0 + title: "Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + order: 1 + title: "account_id" + sourceType: + title: "dbt" + const: "dbt" + enum: + - "dbt" + order: 0 + type: "string" + source-dbt-update: + type: "object" + required: + - "api_key_2" + - "account_id" + properties: + api_key_2: + type: "string" + order: 0 + title: "Token" + airbyte_secret: true + account_id: + type: "string" + order: 1 + title: "account_id" + source-nylas: + type: "object" + required: + - "api_key" + - "api_server" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + api_server: + type: "string" + enum: + - "us" + - "eu" + order: 1 + title: "API Server" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "nylas" + const: "nylas" + enum: + - "nylas" + order: 0 + type: "string" + source-nylas-update: + type: "object" + required: + - "api_key" + - "api_server" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + api_server: + type: "string" + enum: + - "us" + - "eu" + order: 1 + title: "API Server" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + end_date: + type: "string" + title: "End date" + format: 
"date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + source-s3: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." 
+ default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." 
+ default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + role_arn: + title: "AWS Role ARN" + description: + "Specifies the Amazon Resource Name (ARN) of an IAM role that\ + \ you want to use to perform operations requested using this profile.\ + \ Set the External ID to the Airbyte workspace ID, which can be found\ + \ in the URL of this page." + order: 6 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + examples: + - "my-s3-endpoint.com" + - "https://my-s3-endpoint.com" + order: 4 + type: "string" + region_name: + title: "AWS Region" + description: + "AWS region where the S3 bucket is located. If not provided,\ + \ the region will be determined automatically." + order: 5 + type: "string" + sourceType: + title: "s3" + const: "s3" + enum: + - "s3" + order: 0 + type: "string" + required: + - "streams" + - "bucket" + - "sourceType" + source-s3-update: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discovery" + description: + "The number of recent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary."
+ airbyte_secret: true + order: 2 + type: "string" + role_arn: + title: "AWS Role ARN" + description: + "Specifies the Amazon Resource Name (ARN) of an IAM role that\ + \ you want to use to perform operations requested using this profile.\ + \ Set the External ID to the Airbyte workspace ID, which can be found\ + \ in the URL of this page." + order: 6 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + examples: + - "my-s3-endpoint.com" + - "https://my-s3-endpoint.com" + order: 4 + type: "string" + region_name: + title: "AWS Region" + description: + "AWS region where the S3 bucket is located. If not provided,\ + \ the region will be determined automatically." + order: 5 + type: "string" + required: + - "streams" + - "bucket" + source-azure-blob-storage: + title: "SourceAzureBlobStorageSpec" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\ + \ File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." 
+ default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Azure Blob Storage" + type: "object" + order: 2 + oneOf: + - title: "Authenticate via Oauth2" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2" + const: "oauth2" + enum: + - "oauth2" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft Azure Application user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Authenticate via Storage Account Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "storage_account_key" + const: "storage_account_key" + enum: + - "storage_account_key" + type: "string" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "azure_blob_storage_account_key" + - "auth_type" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + description: "The account's name of the Azure Blob Storage." 
+ examples: + - "airbyte5storage" + order: 3 + type: "string" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + description: "The name of the Azure blob storage container." + examples: + - "airbytetescontainername" + order: 4 + type: "string" + azure_blob_storage_endpoint: + title: "Endpoint Domain Name" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + order: 11 + type: "string" + sourceType: + title: "azure-blob-storage" + const: "azure-blob-storage" + enum: + - "azure-blob-storage" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "azure_blob_storage_account_name" + - "azure_blob_storage_container_name" + - "sourceType" + source-azure-blob-storage-update: + title: "SourceAzureBlobStorageSpec" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\ + \ File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. 
When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. 
This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." 
+ default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + 
true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." 
+ default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. `hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." 
+ default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Azure Blob Storage" + type: "object" + order: 2 + oneOf: + - title: "Authenticate via Oauth2" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2" + const: "oauth2" + enum: + - "oauth2" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft Azure Application user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Authenticate via Storage Account Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "storage_account_key" + const: "storage_account_key" + enum: + - "storage_account_key" + type: "string" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + order: 3 + type: "string" + required: + - "azure_blob_storage_account_key" + - "auth_type" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + description: "The account's name of the Azure Blob Storage." 
+ examples: + - "airbyte5storage" + order: 3 + type: "string" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + description: "The name of the Azure blob storage container." + examples: + - "airbytetescontainername" + order: 4 + type: "string" + azure_blob_storage_endpoint: + title: "Endpoint Domain Name" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + order: 11 + type: "string" + required: + - "streams" + - "credentials" + - "azure_blob_storage_account_name" + - "azure_blob_storage_container_name" + source-close-com: + title: "Close.com Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Replication Start Date" + type: "string" + description: + "The start date to sync data; all data after this date will\ + \ be replicated. Leave blank to retrieve all the data available in the\ + \ account. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + sourceType: + title: "close-com" + const: "close-com" + enum: + - "close-com" + order: 0 + type: "string" + source-close-com-update: + title: "Close.com Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + start_date: + title: "Replication Start Date" + type: "string" + description: + "The start date to sync data; all data after this date will\ + \ be replicated. 
Leave blank to retrieve all the data available in the\ + \ account. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + source-zendesk-sunshine: + type: "object" + required: + - "start_date" + - "subdomain" + - "sourceType" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: + "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + sourceType: + title: "zendesk-sunshine" + const: "zendesk-sunshine" + enum: + - "zendesk-sunshine" + order: 0 + type: "string" + source-zendesk-sunshine-update: + type: "object" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: + "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + source-canny: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "You can find your secret API key in Your Canny Subdomain >\ + \ Settings > API" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "canny" + const: "canny" + enum: + - "canny" + order: 0 + type: "string" + source-canny-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "You can find your secret API key in Your Canny Subdomain >\ + \ Settings > API" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-exchange-rates: + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + - "sourceType" + properties: + start_date: + type: "string" + description: "Start getting data from that date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + access_key: + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + base: + type: "string" + description: + "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + ignore_weekends: + type: "boolean" + description: "Ignore weekends? (Exchanges don't run on weekends)" + default: true + sourceType: + title: "exchange-rates" + const: "exchange-rates" + enum: + - "exchange-rates" + order: 0 + type: "string" + source-exchange-rates-update: + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + properties: + start_date: + type: "string" + description: "Start getting data from that date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + access_key: + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + base: + type: "string" + description: + "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + ignore_weekends: + type: "boolean" + description: "Ignore weekends? (Exchanges don't run on weekends)" + default: true + source-woocommerce: + type: "object" + title: "Woocommerce Spec" + required: + - "api_key" + - "api_secret" + - "shop" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "Customer Key" + description: "Customer Key for API in WooCommerce shop" + airbyte_secret: true + x-speakeasy-param-sensitive: true + api_secret: + type: "string" + order: 1 + title: "Customer Secret" + description: "Customer Secret for API in WooCommerce shop" + airbyte_secret: true + x-speakeasy-param-sensitive: true + shop: + type: "string" + order: 2 + title: "Shop Name" + description: + "The name of the store. For https://EXAMPLE.com, the shop name\ + \ is 'EXAMPLE.com'." + start_date: + type: "string" + order: 3 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2021-01-01" + description: "The date you would like to replicate data from. 
Format: YYYY-MM-DD" + sourceType: + title: "woocommerce" + const: "woocommerce" + enum: + - "woocommerce" + order: 0 + type: "string" + source-woocommerce-update: + type: "object" + title: "Woocommerce Spec" + required: + - "api_key" + - "api_secret" + - "shop" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "Customer Key" + description: "Customer Key for API in WooCommerce shop" + airbyte_secret: true + api_secret: + type: "string" + order: 1 + title: "Customer Secret" + description: "Customer Secret for API in WooCommerce shop" + airbyte_secret: true + shop: + type: "string" + order: 2 + title: "Shop Name" + description: + "The name of the store. For https://EXAMPLE.com, the shop name\ + \ is 'EXAMPLE.com'." + start_date: + type: "string" + order: 3 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2021-01-01" + description: "The date you would like to replicate data from. Format: YYYY-MM-DD" + source-linkedin-pages: + type: "object" + required: + - "org_id" + - "sourceType" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the LinkedIn developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_secret: + type: "string" + title: "Client secret" + description: "The client secret of the LinkedIn developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access token" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + title: "Authentication" + org_id: + type: "string" + order: 1 + title: "Organization ID" + examples: + - "123456789" + description: "Specify the Organization ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + default: "2023-01-01T00:00:00Z" + description: + "Start date for getting metrics per time period. Must be atmost\ + \ 12 months before the request date (UTC) and atleast 2 days prior to\ + \ the request date (UTC). See https://bit.ly/linkedin-pages-date-rules\ + \ {{ \"\\n\" }} {{ response.errorDetails }}" + time_granularity_type: + enum: + - "DAY" + - "MONTH" + type: "string" + order: 3 + title: "Time Granularity Type" + default: "DAY" + description: + "Granularity of the statistics for metrics per time period.\ + \ Must be either \"DAY\" or \"MONTH\"" + sourceType: + title: "linkedin-pages" + const: "linkedin-pages" + enum: + - "linkedin-pages" + order: 0 + type: "string" + source-linkedin-pages-update: + type: "object" + required: + - "org_id" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the LinkedIn developer application." 
+ airbyte_secret: true + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_secret: + type: "string" + title: "Client secret" + description: "The client secret of the LinkedIn developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + - type: "object" + title: "Access token" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + order: 0 + title: "Authentication" + org_id: + type: "string" + order: 1 + title: "Organization ID" + examples: + - "123456789" + description: "Specify the Organization ID" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + default: "2023-01-01T00:00:00Z" + description: + "Start date for getting metrics per time period. Must be atmost\ + \ 12 months before the request date (UTC) and atleast 2 days prior to\ + \ the request date (UTC). 
See https://bit.ly/linkedin-pages-date-rules\ + \ {{ \"\\n\" }} {{ response.errorDetails }}" + time_granularity_type: + enum: + - "DAY" + - "MONTH" + type: "string" + order: 3 + title: "Time Granularity Type" + default: "DAY" + description: + "Granularity of the statistics for metrics per time period.\ + \ Must be either \"DAY\" or \"MONTH\"" + source-planhat: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Your Planhat API Access Token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "planhat" + const: "planhat" + enum: + - "planhat" + order: 0 + type: "string" + source-planhat-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Your Planhat API Access Token" + order: 0 + title: "API Token" + airbyte_secret: true + source-whisky-hunter: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "whisky-hunter" + const: "whisky-hunter" + enum: + - "whisky-hunter" + order: 0 + type: "string" + source-whisky-hunter-update: + type: "object" + required: [] + properties: {} + source-tvmaze-schedule: + type: "object" + required: + - "start_date" + - "domestic_schedule_country_code" + - "sourceType" + properties: + start_date: + type: "string" + description: "Start date for TV schedule retrieval. May be in the future." + order: 0 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: + "End date for TV schedule retrieval. May be in the future.\ + \ Optional.\n" + order: 1 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + domestic_schedule_country_code: + type: "string" + description: "Country code for domestic TV schedule retrieval." + examples: + - "US" + - "GB" + order: 2 + web_schedule_country_code: + type: "string" + description: + "ISO 3166-1 country code for web TV schedule retrieval. 
Leave\ + \ blank for\nall countries plus global web channels (e.g. Netflix). Alternatively,\n\ + set to 'global' for just global web channels.\n" + examples: + - "US" + - "GB" + - "global" + order: 3 + sourceType: + title: "tvmaze-schedule" + const: "tvmaze-schedule" + enum: + - "tvmaze-schedule" + order: 0 + type: "string" + source-tvmaze-schedule-update: + type: "object" + required: + - "start_date" + - "domestic_schedule_country_code" + properties: + start_date: + type: "string" + description: "Start date for TV schedule retrieval. May be in the future." + order: 0 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: + "End date for TV schedule retrieval. May be in the future.\ + \ Optional.\n" + order: 1 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + domestic_schedule_country_code: + type: "string" + description: "Country code for domestic TV schedule retrieval." + examples: + - "US" + - "GB" + order: 2 + web_schedule_country_code: + type: "string" + description: + "ISO 3166-1 country code for web TV schedule retrieval. Leave\ + \ blank for\nall countries plus global web channels (e.g. Netflix). Alternatively,\n\ + set to 'global' for just global web channels.\n" + examples: + - "US" + - "GB" + - "global" + order: 3 + source-salesloft: + type: "object" + required: + - "credentials" + - "start_date" + - "sourceType" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "Authenticate via OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "auth_type" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Salesloft developer application." + access_token: + type: "string" + description: "Access Token for making authenticated requests." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Salesloft developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining a new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + format: "date-time" + description: "The date-time when the access token should be refreshed." + - type: "object" + title: "Authenticate via API Key" + required: + - "api_key" + - "auth_type" + properties: + api_key: + type: "string" + title: "API Key" + description: + "API Key for making authenticated requests. More instruction\ + \ on how to find this value in our docs" + airbyte_secret: true + x-speakeasy-param-sensitive: true + auth_type: + type: "string" + const: "api_key" + enum: + - "api_key" + order: 0 + title: "Credentials" + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + examples: + - "2020-11-16T00:00:00Z" + description: + "The date from which you'd like to replicate data for Salesloft\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ sourceType: + title: "salesloft" + const: "salesloft" + enum: + - "salesloft" + order: 0 + type: "string" + source-salesloft-update: + type: "object" + required: + - "credentials" + - "start_date" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "Authenticate via OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "auth_type" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Salesloft developer application." + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Salesloft developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining a new access token." + airbyte_secret: true + token_expiry_date: + type: "string" + format: "date-time" + description: "The date-time when the access token should be refreshed." + - type: "object" + title: "Authenticate via API Key" + required: + - "api_key" + - "auth_type" + properties: + api_key: + type: "string" + title: "API Key" + description: + "API Key for making authenticated requests. More instruction\ + \ on how to find this value in our docs" + airbyte_secret: true + auth_type: + type: "string" + const: "api_key" + enum: + - "api_key" + order: 0 + title: "Credentials" + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + examples: + - "2020-11-16T00:00:00Z" + description: + "The date from which you'd like to replicate data for Salesloft\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ source-shortio: + title: "Shortio Spec" + type: "object" + required: + - "domain_id" + - "secret_key" + - "start_date" + - "sourceType" + properties: + domain_id: + type: "string" + desciprtion: "Short.io Domain ID" + title: "Domain ID" + airbyte_secret: false + x-speakeasy-param-sensitive: true + secret_key: + type: "string" + title: "Secret Key" + description: "Short.io Secret Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-07-30T03:43:59.244Z" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "shortio" + const: "shortio" + enum: + - "shortio" + order: 0 + type: "string" + source-shortio-update: + title: "Shortio Spec" + type: "object" + required: + - "domain_id" + - "secret_key" + - "start_date" + properties: + domain_id: + type: "string" + desciprtion: "Short.io Domain ID" + title: "Domain ID" + airbyte_secret: false + secret_key: + type: "string" + title: "Secret Key" + description: "Short.io Secret Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2023-07-30T03:43:59.244Z" + airbyte_secret: false + source-instatus: + title: "Instatus Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Instatus REST API key" + x-speakeasy-param-sensitive: true + sourceType: + title: "instatus" + const: "instatus" + enum: + - "instatus" + order: 0 + type: "string" + source-instatus-update: + title: "Instatus Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Instatus REST API key" + source-yandex-metrica: + title: "Yandex Metrica Spec" + type: "object" + required: + - "auth_token" + - "counter_id" + - "start_date" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication Token" + description: "Your Yandex Metrica API access token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + counter_id: + type: "string" + title: "Counter ID" + description: "Counter ID" + pattern: "^[0-9]+$" + order: 1 + start_date: + title: "Start Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\"." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 2 + end_date: + title: "End Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\". If not provided will sync till most recent date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 3 + sourceType: + title: "yandex-metrica" + const: "yandex-metrica" + enum: + - "yandex-metrica" + order: 0 + type: "string" + source-yandex-metrica-update: + title: "Yandex Metrica Spec" + type: "object" + required: + - "auth_token" + - "counter_id" + - "start_date" + properties: + auth_token: + type: "string" + title: "Authentication Token" + description: "Your Yandex Metrica API access token" + airbyte_secret: true + order: 0 + counter_id: + type: "string" + title: "Counter ID" + description: "Counter ID" + pattern: "^[0-9]+$" + order: 1 + start_date: + title: "Start Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\"." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 2 + end_date: + title: "End Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\". If not provided will sync till most recent date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 3 + source-vwo: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "vwo" + const: "vwo" + enum: + - "vwo" + order: 0 + type: "string" + source-vwo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-aircall: + type: "object" + required: + - "api_id" + - "api_token" + - "start_date" + - "sourceType" + properties: + api_id: + type: "string" + description: "App ID found at settings https://dashboard.aircall.io/integrations/api-keys" + title: "API ID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + api_token: + type: "string" + description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)" + title: "API Token" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." 
+ title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + sourceType: + title: "aircall" + const: "aircall" + enum: + - "aircall" + order: 0 + type: "string" + source-aircall-update: + type: "object" + required: + - "api_id" + - "api_token" + - "start_date" + properties: + api_id: + type: "string" + description: "App ID found at settings https://dashboard.aircall.io/integrations/api-keys" + title: "API ID" + airbyte_secret: true + order: 0 + api_token: + type: "string" + description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)" + title: "API Token" + airbyte_secret: true + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." + title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + source-clickup-api: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + include_closed_tasks: + type: "boolean" + description: + "Include or exclude closed tasks. By default, they are excluded.\ + \ See here." + order: 5 + title: "Include Closed Tasks" + default: false + sourceType: + title: "clickup-api" + const: "clickup-api" + enum: + - "clickup-api" + order: 0 + type: "string" + source-clickup-api-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." 
+ order: 0 + airbyte_secret: true + include_closed_tasks: + type: "boolean" + description: + "Include or exclude closed tasks. By default, they are excluded.\ + \ See here." + order: 5 + title: "Include Closed Tasks" + default: false + source-ezofficeinventory: + type: "object" + required: + - "api_key" + - "subdomain" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your EZOfficeInventory Access Token. API Access is disabled\ + \ by default. Enable API Access in Settings > Integrations > API Integration\ + \ and click on Update to generate a new access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: + "The company name used in signup, also visible in the URL when\ + \ logged in." + name: "subdomain" + order: 1 + title: "Subdomain" + airbyte_secret: false + x-speakeasy-param-sensitive: true + start_date: + type: "string" + description: + "Earliest date you want to sync historical streams (inventory_histories,\ + \ asset_histories, asset_stock_histories) from" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "ezofficeinventory" + const: "ezofficeinventory" + enum: + - "ezofficeinventory" + order: 0 + type: "string" + source-ezofficeinventory-update: + type: "object" + required: + - "api_key" + - "subdomain" + - "start_date" + properties: + api_key: + type: "string" + description: + "Your EZOfficeInventory Access Token. API Access is disabled\ + \ by default. Enable API Access in Settings > Integrations > API Integration\ + \ and click on Update to generate a new access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: + "The company name used in signup, also visible in the URL when\ + \ logged in." 
+ name: "subdomain" + order: 1 + title: "Subdomain" + airbyte_secret: false + start_date: + type: "string" + description: + "Earliest date you want to sync historical streams (inventory_histories,\ + \ asset_histories, asset_stock_histories) from" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-snapchat-marketing: + title: "Snapchat Marketing Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + description: + "Date in the format 2022-01-01. Any data before this date will\ + \ not be replicated." + examples: + - "2022-01-01" + default: "2022-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "Date in the format 2017-01-25. Any data after this date will\ + \ not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-30" + order: 4 + format: "date" + action_report_time: + type: "string" + enum: + - "conversion" + - "impression" + title: "Action Report Time" + description: "Specifies the principle for conversion reporting." 
+ default: "conversion" + order: 5 + swipe_up_attribution_window: + type: "string" + title: "Swipe Up Attribution Window" + description: "Attribution window for swipe ups." + enum: + - "1_DAY" + - "7_DAY" + - "28_DAY" + default: "28_DAY" + order: 6 + view_attribution_window: + type: "string" + title: "View Attribution Window" + description: "Attribution window for views." + enum: + - "1_HOUR" + - "3_HOUR" + - "6_HOUR" + - "1_DAY" + - "7_DAY" + default: "1_DAY" + order: 7 + sourceType: + title: "snapchat-marketing" + const: "snapchat-marketing" + enum: + - "snapchat-marketing" + order: 0 + type: "string" + source-snapchat-marketing-update: + title: "Snapchat Marketing Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 2 + start_date: + title: "Start Date" + type: "string" + description: + "Date in the format 2022-01-01. Any data before this date will\ + \ not be replicated." + examples: + - "2022-01-01" + default: "2022-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "Date in the format 2017-01-25. Any data after this date will\ + \ not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-30" + order: 4 + format: "date" + action_report_time: + type: "string" + enum: + - "conversion" + - "impression" + title: "Action Report Time" + description: "Specifies the principle for conversion reporting." 
+ default: "conversion" + order: 5 + swipe_up_attribution_window: + type: "string" + title: "Swipe Up Attribution Window" + description: "Attribution window for swipe ups." + enum: + - "1_DAY" + - "7_DAY" + - "28_DAY" + default: "28_DAY" + order: 6 + view_attribution_window: + type: "string" + title: "View Attribution Window" + description: "Attribution window for views." + enum: + - "1_HOUR" + - "3_HOUR" + - "6_HOUR" + - "1_DAY" + - "7_DAY" + default: "1_DAY" + order: 7 + source-gitlab: + title: "Source Gitlab Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for GitLab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\ + \ will be replicated. All data generated after this date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + api_url: + type: "string" + examples: + - "gitlab.com" + - "https://gitlab.com" + - "https://gitlab.company.org" + title: "API URL" + default: "gitlab.com" + description: "Please enter your basic URL from GitLab instance." + order: 2 + groups_list: + type: "array" + items: + type: "string" + examples: + - "airbyte.io" + title: "Groups" + description: "List of groups. e.g. airbyte.io." + order: 3 + projects_list: + type: "array" + items: + type: "string" + title: "Projects" + examples: + - "airbyte.io/documentation" + description: + "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab." 
+ order: 4 + sourceType: + title: "gitlab" + const: "gitlab" + enum: + - "gitlab" + order: 0 + type: "string" + source-gitlab-update: + title: "Source Gitlab Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for GitLab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\ + \ will be replicated. All data generated after this date will be replicated." 
+ examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + api_url: + type: "string" + examples: + - "gitlab.com" + - "https://gitlab.com" + - "https://gitlab.company.org" + title: "API URL" + default: "gitlab.com" + description: "Please enter your basic URL from GitLab instance." + order: 2 + groups_list: + type: "array" + items: + type: "string" + examples: + - "airbyte.io" + title: "Groups" + description: "List of groups. e.g. airbyte.io." + order: 3 + projects_list: + type: "array" + items: + type: "string" + title: "Projects" + examples: + - "airbyte.io/documentation" + description: + "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab." + order: 4 + source-launchdarkly: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + title: "Access token" + description: + "Your Access token. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "launchdarkly" + const: "launchdarkly" + enum: + - "launchdarkly" + order: 0 + type: "string" + source-launchdarkly-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + title: "Access token" + description: + "Your Access token. See here." + airbyte_secret: true + order: 0 + source-snowflake: + title: "Snowflake Source Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Key Pair Authentication" + type: "object" + order: 1 + required: + - "username" + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + order: 0 + enum: + - "Key Pair Authentication" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 2 + properties: + auth_type: + type: "string" + const: "username/password" + order: 0 + enum: + - "username/password" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." 
+ examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + x-speakeasy-param-sensitive: true + order: 0 + host: + description: + "The host domain of the snowflake instance (must include the\ + \ account, region, cloud environment, and end with snowflakecomputing.com)." + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + type: "string" + title: "Account Name" + order: 1 + role: + description: "The role you created for Airbyte to access Snowflake." + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 2 + warehouse: + description: "The warehouse you created for Airbyte to access data." + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 3 + database: + description: "The database you created for Airbyte to access data." + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 4 + schema: + description: + "The source Snowflake schema tables. Leave empty to access\ + \ tables from multiple schemas." + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Schema" + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + sourceType: + title: "snowflake" + const: "snowflake" + enum: + - "snowflake" + order: 0 + type: "string" + source-snowflake-update: + title: "Snowflake Source Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + - title: "Key Pair Authentication" + type: "object" + order: 1 + required: + - "username" + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + order: 0 + enum: + - "Key Pair Authentication" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." 
+ multiline: true + airbyte_secret: true + order: 2 + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + order: 3 + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 2 + properties: + auth_type: + type: "string" + const: "username/password" + order: 0 + enum: + - "username/password" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + order: 0 + host: + description: + "The host domain of the snowflake instance (must include the\ + \ account, region, cloud environment, and end with snowflakecomputing.com)." + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + type: "string" + title: "Account Name" + order: 1 + role: + description: "The role you created for Airbyte to access Snowflake." + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 2 + warehouse: + description: "The warehouse you created for Airbyte to access data." + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 3 + database: + description: "The database you created for Airbyte to access data." + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 4 + schema: + description: + "The source Snowflake schema tables. Leave empty to access\ + \ tables from multiple schemas." + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Schema" + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + source-auth0: + title: "Auth0 Management API Spec" + type: "object" + required: + - "base_url" + - "credentials" + - "sourceType" + properties: + base_url: + type: "string" + title: "Base URL" + examples: + - "https://dev-yourOrg.us.auth0.com/" + description: + "The Authentication API is served over HTTPS. All URLs referenced\ + \ in the documentation have the following base `https://YOUR_DOMAIN`" + credentials: + title: "Authentication Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2 Confidential Application" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "audience" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_confidential_application" + order: 0 + enum: + - "oauth2_confidential_application" + client_id: + title: "Client ID" + description: + "Your application's Client ID. You can find this value\ + \ on the application's\ + \ settings tab after you login the admin portal." + type: "string" + examples: + - "Client_ID" + client_secret: + title: "Client Secret" + description: + "Your application's Client Secret. You can find this\ + \ value on the application's settings tab after you login the admin portal." + type: "string" + examples: + - "Client_Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + audience: + title: "Audience" + description: + "The audience for the token, which is your API. 
You can\ + \ find this in the Identifier field on your API's settings tab" + type: "string" + examples: + - "https://dev-yourOrg.us.auth0.com/api/v2/" + - type: "object" + title: "OAuth2 Access Token" + required: + - "access_token" + - "auth_type" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_access_token" + examples: + - "oauth2_access_token" + order: 0 + enum: + - "oauth2_access_token" + access_token: + title: "OAuth2 Access Token" + description: + "Also called API Access Token The access token used to call the Auth0 Management\ + \ API Token. It's a JWT that contains specific grant permissions\ + \ knowns as scopes." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "auth0" + const: "auth0" + enum: + - "auth0" + order: 0 + type: "string" + source-auth0-update: + title: "Auth0 Management API Spec" + type: "object" + required: + - "base_url" + - "credentials" + properties: + base_url: + type: "string" + title: "Base URL" + examples: + - "https://dev-yourOrg.us.auth0.com/" + description: + "The Authentication API is served over HTTPS. 
All URLs referenced\ + \ in the documentation have the following base `https://YOUR_DOMAIN`" + credentials: + title: "Authentication Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2 Confidential Application" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "audience" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_confidential_application" + order: 0 + enum: + - "oauth2_confidential_application" + client_id: + title: "Client ID" + description: + "Your application's Client ID. You can find this value\ + \ on the application's\ + \ settings tab after you login the admin portal." + type: "string" + examples: + - "Client_ID" + client_secret: + title: "Client Secret" + description: + "Your application's Client Secret. You can find this\ + \ value on the application's settings tab after you login the admin portal." + type: "string" + examples: + - "Client_Secret" + airbyte_secret: true + audience: + title: "Audience" + description: + "The audience for the token, which is your API. You can\ + \ find this in the Identifier field on your API's settings tab" + type: "string" + examples: + - "https://dev-yourOrg.us.auth0.com/api/v2/" + - type: "object" + title: "OAuth2 Access Token" + required: + - "access_token" + - "auth_type" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_access_token" + examples: + - "oauth2_access_token" + order: 0 + enum: + - "oauth2_access_token" + access_token: + title: "OAuth2 Access Token" + description: + "Also called API Access Token The access token used to call the Auth0 Management\ + \ API Token. It's a JWT that contains specific grant permissions\ + \ knowns as scopes." + type: "string" + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false + source-linnworks: + title: "Linnworks Spec" + type: "object" + required: + - "application_id" + - "application_secret" + - "token" + - "start_date" + - "sourceType" + properties: + application_id: + title: "Application ID." + description: "Linnworks Application ID" + type: "string" + application_secret: + title: "Application Secret" + description: "Linnworks Application Secret" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + token: + title: "API Token" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + type: "string" + format: "date-time" + sourceType: + title: "linnworks" + const: "linnworks" + enum: + - "linnworks" + order: 0 + type: "string" + source-linnworks-update: + title: "Linnworks Spec" + type: "object" + required: + - "application_id" + - "application_secret" + - "token" + - "start_date" + properties: + application_id: + title: "Application ID." + description: "Linnworks Application ID" + type: "string" + application_secret: + title: "Application Secret" + description: "Linnworks Application Secret" + type: "string" + airbyte_secret: true + token: + title: "API Token" + type: "string" + airbyte_secret: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + type: "string" + format: "date-time" + source-microsoft-sharepoint: + title: "Microsoft SharePoint Source Spec" + description: + "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the SharePoint API." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access,\ + \ 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to\ + \ search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." + order: 4 + type: "string" + sourceType: + title: "microsoft-sharepoint" + const: "microsoft-sharepoint" + enum: + - "microsoft-sharepoint" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "sourceType" + source-microsoft-sharepoint-update: + title: "Microsoft SharePoint Source Spec" + description: + "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the SharePoint API." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access,\ + \ 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to\ + \ search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." 
+ order: 4 + type: "string" + required: + - "streams" + - "credentials" + source-amazon-sqs: + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + - "sourceType" + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: + "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. 
" + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: + "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Mesage Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: + "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." + type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + x-speakeasy-param-sensitive: true + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + x-speakeasy-param-sensitive: true + sourceType: + title: "amazon-sqs" + const: "amazon-sqs" + enum: + - "amazon-sqs" + order: 0 + type: "string" + source-amazon-sqs-update: + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + 
- "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: + "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. " + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: + "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Mesage Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: + "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." 
+ type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + source-sonar-cloud: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + - "sourceType" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." + examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." + examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "sonar-cloud" + const: "sonar-cloud" + enum: + - "sonar-cloud" + order: 0 + type: "string" + source-sonar-cloud-update: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." 
+ examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." + examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + source-clockify: + type: "object" + required: + - "api_key" + - "workspace_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "You can get your api access_key here This API is Case Sensitive." + order: 0 + x-speakeasy-param-sensitive: true + api_url: + type: "string" + title: "API Url" + description: + "The URL for the Clockify API. This should only need to be\ + \ modified if connecting to an enterprise version of Clockify." + default: "https://api.clockify.me" + order: 1 + workspace_id: + type: "string" + title: "Workspace Id" + description: "WorkSpace Id" + order: 2 + sourceType: + title: "clockify" + const: "clockify" + enum: + - "clockify" + order: 0 + type: "string" + source-clockify-update: + type: "object" + required: + - "api_key" + - "workspace_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "You can get your api access_key here This API is Case Sensitive." + order: 0 + api_url: + type: "string" + title: "API Url" + description: + "The URL for the Clockify API. 
This should only need to be\ + \ modified if connecting to an enterprise version of Clockify." + default: "https://api.clockify.me" + order: 1 + workspace_id: + type: "string" + title: "Workspace Id" + description: "WorkSpace Id" + order: 2 + source-marketo: + title: "Source Marketo Spec" + type: "object" + required: + - "domain_url" + - "client_id" + - "client_secret" + - "start_date" + - "sourceType" + properties: + domain_url: + title: "Domain URL" + type: "string" + order: 3 + description: + "Your Marketo Base URL. See the docs for info on how to obtain this." + examples: + - "https://000-AAA-000.mktorest.com" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Marketo developer application. See the\ + \ docs for info on how to obtain this." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Marketo developer application. See\ + \ the\ + \ docs for info on how to obtain this." + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + order: 2 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-09-25T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + sourceType: + title: "marketo" + const: "marketo" + enum: + - "marketo" + order: 0 + type: "string" + source-marketo-update: + title: "Source Marketo Spec" + type: "object" + required: + - "domain_url" + - "client_id" + - "client_secret" + - "start_date" + properties: + domain_url: + title: "Domain URL" + type: "string" + order: 3 + description: + "Your Marketo Base URL. See the docs for info on how to obtain this." 
+ examples: + - "https://000-AAA-000.mktorest.com" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Marketo developer application. See the\ + \ docs for info on how to obtain this." + order: 0 + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Marketo developer application. See\ + \ the\ + \ docs for info on how to obtain this." + order: 1 + airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + order: 2 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-09-25T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + source-pocket: + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + - "sourceType" + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: + "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." + order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." 
+ order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: + "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + sourceType: + title: "pocket" + const: "pocket" + enum: + - "pocket" + order: 0 + type: "string" + source-pocket-update: + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: + "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." 
+ order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." + order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: + "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + source-productboard: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Your Productboard access token. See https://developer.productboard.com/reference/authentication\ + \ for steps to generate one." + name: "api_key" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "productboard" + const: "productboard" + enum: + - "productboard" + order: 0 + type: "string" + source-productboard-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Your Productboard access token. 
See https://developer.productboard.com/reference/authentication\ + \ for steps to generate one." + name: "api_key" + order: 0 + title: "Access Token" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-smartsheets: + title: "Smartsheets Source Spec" + type: "object" + required: + - "credentials" + - "spreadsheet_id" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Access Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token to use for accessing your data from\ + \ Smartsheets. 
This access token must be generated by a user with\ + \ at least read access to the data you'd like to replicate. Generate\ + \ an access token in the Smartsheets main menu by clicking Account\ + \ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + spreadsheet_id: + title: "Sheet ID" + description: + "The spreadsheet ID. Find it by opening the spreadsheet then\ + \ navigating to File > Properties" + type: "string" + order: 1 + metadata_fields: + title: "Metadata Fields" + type: "array" + items: + title: "Validenums" + enum: + - "sheetcreatedAt" + - "sheetid" + - "sheetmodifiedAt" + - "sheetname" + - "sheetpermalink" + - "sheetversion" + - "sheetaccess_level" + - "row_id" + - "row_access_level" + - "row_created_at" + - "row_created_by" + - "row_expanded" + - "row_modified_by" + - "row_parent_id" + - "row_permalink" + - "row_number" + - "row_version" + description: "A List of available columns which metadata can be pulled from." + order: 3 + sourceType: + title: "smartsheets" + const: "smartsheets" + enum: + - "smartsheets" + order: 0 + type: "string" + source-smartsheets-update: + title: "Smartsheets Source Spec" + type: "object" + required: + - "credentials" + - "spreadsheet_id" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." 
+ airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "API Access Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token to use for accessing your data from\ + \ Smartsheets. This access token must be generated by a user with\ + \ at least read access to the data you'd like to replicate. Generate\ + \ an access token in the Smartsheets main menu by clicking Account\ + \ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token." + airbyte_secret: true + spreadsheet_id: + title: "Sheet ID" + description: + "The spreadsheet ID. Find it by opening the spreadsheet then\ + \ navigating to File > Properties" + type: "string" + order: 1 + metadata_fields: + title: "Metadata Fields" + type: "array" + items: + title: "Validenums" + enum: + - "sheetcreatedAt" + - "sheetid" + - "sheetmodifiedAt" + - "sheetname" + - "sheetpermalink" + - "sheetversion" + - "sheetaccess_level" + - "row_id" + - "row_access_level" + - "row_created_at" + - "row_created_by" + - "row_expanded" + - "row_modified_by" + - "row_parent_id" + - "row_permalink" + - "row_number" + - "row_version" + description: "A List of available columns which metadata can be pulled from." + order: 3 + source-lob: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use for authentication. You can find your account's\ + \ API keys in your Dashboard Settings at https://dashboard.lob.com/settings/api-keys." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + sourceType: + title: "lob" + const: "lob" + enum: + - "lob" + order: 0 + type: "string" + source-lob-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use for authentication. You can find your account's\ + \ API keys in your Dashboard Settings at https://dashboard.lob.com/settings/api-keys." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + source-iterable: + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." 
+ examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + sourceType: + title: "iterable" + const: "iterable" + enum: + - "iterable" + order: 0 + type: "string" + source-iterable-update: + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + source-mysql: + title: "MySql Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "replication_method" + - "sourceType" + properties: + host: + description: "The host name of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port to connect to." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + description: "The database name." + title: "Database" + type: "string" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + always_show: true + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3). For\ + \ more information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. Read more in the docs." + type: "object" + order: 7 + oneOf: + - title: "preferred" + description: + "Automatically attempt SSL connection. If the MySQL server\ + \ does not support SSL, continue with a regular connection." + required: + - "mode" + properties: + mode: + type: "string" + const: "preferred" + order: 0 + enum: + - "preferred" + - title: "required" + description: + "Always connect with SSL. If the MySQL server doesn’t support\ + \ SSL, the connection will not be established. Certificate Authority\ + \ (CA) and Hostname are not verified." + required: + - "mode" + properties: + mode: + type: "string" + const: "required" + order: 0 + enum: + - "required" + - title: "Verify CA" + description: + "Always connect with SSL. Verifies CA, but allows connection\ + \ even if Hostname does not match." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_ca" + order: 0 + enum: + - "verify_ca" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Verify Identity" + description: "Always connect with SSL. Verify both CA and Hostname." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_identity" + order: 0 + enum: + - "verify_identity" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 8 + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Binary Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the MySQL binary log. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 1 + always_show: true + server_time_zone: + type: "string" + title: "Configured server timezone for the MySQL source (Advanced)" + description: + "Enter the configured MySQL server timezone. This should\ + \ only be done if the configured timezone in your MySQL instance\ + \ does not conform to IANNA standard." + order: 2 + always_show: true + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 3 + always_show: true + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 4 + always_show: true + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "mysql" + const: "mysql" + enum: + - "mysql" + order: 0 + type: "string" + source-mysql-update: + title: "MySql Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "replication_method" + properties: + host: + description: "The host name of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port to connect to." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + description: "The database name." + title: "Database" + type: "string" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + always_show: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For\ + \ more information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." 
+ type: "boolean" + default: true + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. Read more in the docs." + type: "object" + order: 7 + oneOf: + - title: "preferred" + description: + "Automatically attempt SSL connection. If the MySQL server\ + \ does not support SSL, continue with a regular connection." + required: + - "mode" + properties: + mode: + type: "string" + const: "preferred" + order: 0 + enum: + - "preferred" + - title: "required" + description: + "Always connect with SSL. If the MySQL server doesn’t support\ + \ SSL, the connection will not be established. Certificate Authority\ + \ (CA) and Hostname are not verified." + required: + - "mode" + properties: + mode: + type: "string" + const: "required" + order: 0 + enum: + - "required" + - title: "Verify CA" + description: + "Always connect with SSL. Verifies CA, but allows connection\ + \ even if Hostname does not match." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_ca" + order: 0 + enum: + - "verify_ca" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." 
+ airbyte_secret: true + order: 4 + - title: "Verify Identity" + description: "Always connect with SSL. Verify both CA and Hostname." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_identity" + order: 0 + enum: + - "verify_identity" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 8 + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Binary Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the MySQL binary log. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 1 + always_show: true + server_time_zone: + type: "string" + title: "Configured server timezone for the MySQL source (Advanced)" + description: + "Enter the configured MySQL server timezone. This should\ + \ only be done if the configured timezone in your MySQL instance\ + \ does not conform to IANNA standard." + order: 2 + always_show: true + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 3 + always_show: true + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 4 + always_show: true + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-rollbar: + type: "object" + required: + - "project_access_token" + - "start_date" + - "account_access_token" + - "sourceType" + properties: + project_access_token: + type: "string" + name: "api_key" + title: "Project Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + account_access_token: + type: "string" + title: "Account Access Token" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "rollbar" + const: "rollbar" + enum: + - "rollbar" + order: 0 + type: "string" + source-rollbar-update: + type: "object" + required: + - "project_access_token" + - "start_date" + - "account_access_token" + properties: + project_access_token: + type: "string" + name: "api_key" + title: "Project Access Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + account_access_token: + type: "string" + title: "Account Access Token" + airbyte_secret: true + order: 2 + source-emailoctopus: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "EmailOctopus API key" + description: + "EmailOctopus 
API Key. See the docs for information on how to generate this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "emailoctopus" + const: "emailoctopus" + enum: + - "emailoctopus" + order: 0 + type: "string" + source-emailoctopus-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "EmailOctopus API key" + description: + "EmailOctopus API Key. See the docs for information on how to generate this key." + airbyte_secret: true + order: 0 + source-railz: + title: "Railz Spec" + type: "object" + required: + - "client_id" + - "secret_key" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID (client_id)" + order: 0 + secret_key: + type: "string" + title: "Secret key" + description: "Secret key (secret_key)" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + description: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + sourceType: + title: "railz" + const: "railz" + enum: + - "railz" + order: 0 + type: "string" + source-railz-update: + title: "Railz Spec" + type: "object" + required: + - "client_id" + - "secret_key" + - "start_date" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID (client_id)" + order: 0 + secret_key: + type: "string" + title: "Secret key" + description: "Secret key (secret_key)" + order: 1 + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + description: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + source-orbit: + type: "object" + required: + - "api_token" + - "workspace" + - "sourceType" + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: + "Authorizes you to work with Orbit workspaces associated with\ + \ the token." 
+ order: 0 + x-speakeasy-param-sensitive: true + workspace: + type: "string" + title: "Workspace" + description: + "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + sourceType: + title: "orbit" + const: "orbit" + enum: + - "orbit" + order: 0 + type: "string" + source-orbit-update: + type: "object" + required: + - "api_token" + - "workspace" + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: + "Authorizes you to work with Orbit workspaces associated with\ + \ the token." + order: 0 + workspace: + type: "string" + title: "Workspace" + description: + "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + source-apify-dataset: + title: "Apify Dataset Spec" + type: "object" + required: + - "token" + - "dataset_id" + - "sourceType" + properties: + token: + type: "string" + title: "API token" + description: + "Personal API token of your Apify account. In Apify Console,\ + \ you can find your API token in the Settings section under the Integrations tab after you login. See\ + \ the Apify Docs for more information." + examples: + - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk" + airbyte_secret: true + x-speakeasy-param-sensitive: true + dataset_id: + type: "string" + title: "Dataset ID" + description: + "ID of the dataset you would like to load to Airbyte. In Apify\ + \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. 
See the Apify Docs\ + \ for more information." + examples: + - "rHuMdwm6xCFt6WiGU" + sourceType: + title: "apify-dataset" + const: "apify-dataset" + enum: + - "apify-dataset" + order: 0 + type: "string" + source-apify-dataset-update: + title: "Apify Dataset Spec" + type: "object" + required: + - "token" + - "dataset_id" + properties: + token: + type: "string" + title: "API token" + description: + "Personal API token of your Apify account. In Apify Console,\ + \ you can find your API token in the Settings section under the Integrations tab after you login. See\ + \ the Apify Docs for more information." + examples: + - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk" + airbyte_secret: true + dataset_id: + type: "string" + title: "Dataset ID" + description: + "ID of the dataset you would like to load to Airbyte. In Apify\ + \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs\ + \ for more information." + examples: + - "rHuMdwm6xCFt6WiGU" + source-confluence: + type: "object" + required: + - "email" + - "api_token" + - "domain_name" + - "sourceType" + properties: + email: + type: "string" + title: "Email" + description: "Your Confluence login email" + examples: + - "abc@example.com" + order: 0 + api_token: + type: "string" + title: "API Token" + description: + "Please follow the Jira confluence for generating an API token:\ + \ generating an API token." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + domain_name: + type: "string" + title: "Domain name" + description: "Your Confluence domain name" + order: 2 + sourceType: + title: "confluence" + const: "confluence" + enum: + - "confluence" + order: 0 + type: "string" + source-confluence-update: + type: "object" + required: + - "email" + - "api_token" + - "domain_name" + properties: + email: + type: "string" + title: "Email" + description: "Your Confluence login email" + examples: + - "abc@example.com" + order: 0 + api_token: + type: "string" + title: "API Token" + description: + "Please follow the Jira confluence for generating an API token:\ + \ generating an API token." + airbyte_secret: true + order: 1 + domain_name: + type: "string" + title: "Domain name" + description: "Your Confluence domain name" + order: 2 + source-coin-api: + title: "Coin API Spec" + type: "object" + required: + - "api_key" + - "environment" + - "symbol_id" + - "period" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + environment: + type: "string" + description: "The environment to use. Either sandbox or production.\n" + enum: + - "sandbox" + - "production" + default: "sandbox" + order: 1 + symbol_id: + type: "string" + description: + "The symbol ID to use. See the documentation for a list.\n\ + https://docs.coinapi.io/#list-all-symbols-get\n" + order: 2 + period: + type: "string" + description: "The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get" + examples: + - "5SEC" + - "2MTH" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: "The start date in ISO 8601 format." 
+ examples: + - "2019-01-01T00:00:00" + end_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: + "The end date in ISO 8601 format. If not supplied, data will\ + \ be returned\nfrom the start date to the current time, or when the count\ + \ of result\nelements reaches its limit.\n" + examples: + - "2019-01-01T00:00:00" + limit: + type: "integer" + description: + "The maximum number of elements to return. If not supplied,\ + \ the default\nis 100. For numbers larger than 100, each 100 items is\ + \ counted as one\nrequest for pricing purposes. Maximum value is 100000.\n" + minimum: 1 + maximum: 100000 + default: 100 + sourceType: + title: "coin-api" + const: "coin-api" + enum: + - "coin-api" + order: 0 + type: "string" + source-coin-api-update: + title: "Coin API Spec" + type: "object" + required: + - "api_key" + - "environment" + - "symbol_id" + - "period" + - "start_date" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + environment: + type: "string" + description: "The environment to use. Either sandbox or production.\n" + enum: + - "sandbox" + - "production" + default: "sandbox" + order: 1 + symbol_id: + type: "string" + description: + "The symbol ID to use. See the documentation for a list.\n\ + https://docs.coinapi.io/#list-all-symbols-get\n" + order: 2 + period: + type: "string" + description: "The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get" + examples: + - "5SEC" + - "2MTH" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: "The start date in ISO 8601 format." + examples: + - "2019-01-01T00:00:00" + end_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: + "The end date in ISO 8601 format. 
If not supplied, data will\ + \ be returned\nfrom the start date to the current time, or when the count\ + \ of result\nelements reaches its limit.\n" + examples: + - "2019-01-01T00:00:00" + limit: + type: "integer" + description: + "The maximum number of elements to return. If not supplied,\ + \ the default\nis 100. For numbers larger than 100, each 100 items is\ + \ counted as one\nrequest for pricing purposes. Maximum value is 100000.\n" + minimum: 1 + maximum: 100000 + default: 100 + source-orb: + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Orb API Key" + description: "Orb API Key, issued from the Orb admin console." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at before this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2022-03-01T00:00:00Z" + order: 1 + end_date: + type: "string" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at after this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2024-03-01T00:00:00Z" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N days. By default, updated objects that are\ + \ not newly created are not incrementally synced." 
+ order: 3 + string_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (string values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 4 + numeric_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (numeric values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 5 + subscription_usage_grouping_key: + type: "string" + title: "Subscription usage grouping key (string value)" + description: "Property key name to group subscription usage by." + order: 6 + plan_id: + type: "string" + title: "Orb Plan ID for Subscription Usage (string value)" + description: + "Orb Plan ID to filter subscriptions that should have usage\ + \ fetched." + order: 7 + sourceType: + title: "orb" + const: "orb" + enum: + - "orb" + order: 0 + type: "string" + source-orb-update: + type: "object" + required: + - "start_date" + - "api_key" + properties: + api_key: + type: "string" + title: "Orb API Key" + description: "Orb API Key, issued from the Orb admin console." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at before this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2022-03-01T00:00:00Z" + order: 1 + end_date: + type: "string" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at after this data will not be synced. 
For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2024-03-01T00:00:00Z" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N days. By default, updated objects that are\ + \ not newly created are not incrementally synced." + order: 3 + string_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (string values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 4 + numeric_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (numeric values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 5 + subscription_usage_grouping_key: + type: "string" + title: "Subscription usage grouping key (string value)" + description: "Property key name to group subscription usage by." + order: 6 + plan_id: + type: "string" + title: "Orb Plan ID for Subscription Usage (string value)" + description: + "Orb Plan ID to filter subscriptions that should have usage\ + \ fetched." 
+ order: 7 + source-sentry: + title: "Sentry Spec" + type: "object" + required: + - "auth_token" + - "organization" + - "project" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication Tokens" + description: + "Log into Sentry and then create authentication tokens. For self-hosted, you can find or create\ + \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\"" + airbyte_secret: true + x-speakeasy-param-sensitive: true + hostname: + type: "string" + title: "Host Name" + description: + "Host name of Sentry API server. For self-hosted, specify your\ + \ host name here. Otherwise, leave it empty." + default: "sentry.io" + organization: + type: "string" + title: "Organization" + description: "The slug of the organization the groups belong to." + project: + type: "string" + title: "Project" + description: "The name (slug) of the Project you want to sync." + discover_fields: + type: "array" + item: "string" + title: "Discover Event Fields" + description: "Fields to retrieve when fetching discover events" + sourceType: + title: "sentry" + const: "sentry" + enum: + - "sentry" + order: 0 + type: "string" + source-sentry-update: + title: "Sentry Spec" + type: "object" + required: + - "auth_token" + - "organization" + - "project" + properties: + auth_token: + type: "string" + title: "Authentication Tokens" + description: + "Log into Sentry and then create authentication tokens. For self-hosted, you can find or create\ + \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\"" + airbyte_secret: true + hostname: + type: "string" + title: "Host Name" + description: + "Host name of Sentry API server. For self-hosted, specify your\ + \ host name here. Otherwise, leave it empty." + default: "sentry.io" + organization: + type: "string" + title: "Organization" + description: "The slug of the organization the groups belong to." 
+ project: + type: "string" + title: "Project" + description: "The name (slug) of the Project you want to sync." + discover_fields: + type: "array" + item: "string" + title: "Discover Event Fields" + description: "Fields to retrieve when fetching discover events" + source-notion: + title: "Notion Source Spec" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\ + \ During incremental sync, any data generated before this date will not\ + \ be replicated. If left blank, the start date will be set to 2 years\ + \ before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-11-16T00:00:00.000Z" + type: "string" + format: "date-time" + credentials: + title: "Authentication Method" + description: + "Choose either OAuth (recommended for Airbyte Cloud) or Access\ + \ Token. See our docs\ + \ for more information." + type: "object" + order: 1 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "The Access Token received by completing the OAuth flow\ + \ for your Notion integration. See our docs\ + \ for more information." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access Token" + required: + - "auth_type" + - "token" + properties: + auth_type: + type: "string" + const: "token" + enum: + - "token" + token: + title: "Access Token" + description: + "The Access Token for your private Notion integration.\ + \ See the docs\ + \ for more information on how to obtain this token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "notion" + const: "notion" + enum: + - "notion" + order: 0 + type: "string" + source-notion-update: + title: "Notion Source Spec" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\ + \ During incremental sync, any data generated before this date will not\ + \ be replicated. If left blank, the start date will be set to 2 years\ + \ before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-11-16T00:00:00.000Z" + type: "string" + format: "date-time" + credentials: + title: "Authentication Method" + description: + "Choose either OAuth (recommended for Airbyte Cloud) or Access\ + \ Token. See our docs\ + \ for more information." + type: "object" + order: 1 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." 
+ airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "The Access Token received by completing the OAuth flow\ + \ for your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + - type: "object" + title: "Access Token" + required: + - "auth_type" + - "token" + properties: + auth_type: + type: "string" + const: "token" + enum: + - "token" + token: + title: "Access Token" + description: + "The Access Token for your private Notion integration.\ + \ See the docs\ + \ for more information on how to obtain this token." + type: "string" + airbyte_secret: true + source-trustpilot: + title: "Trustpilot Spec" + type: "object" + required: + - "credentials" + - "business_units" + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth 2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "API key" + description: + "The API key of the Trustpilot API application. (represents\ + \ the OAuth Client ID)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Secret" + description: + "The Secret of the Trustpilot API application. (represents\ + \ the OAuth Client Secret)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + title: "Token expiry date time" + description: "The date-time when the access token should be refreshed." 
+ format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access_token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Key" + description: + "The API key authentication method gives you access to only\ + \ the streams which are part of the Public API. When you want to get\ + \ streams available via the Consumer API (e.g. the private reviews)\ + \ you need to use authentication method OAuth 2.0." + required: + - "client_id" + properties: + auth_type: + type: "string" + const: "apikey" + enum: + - "apikey" + client_id: + type: "string" + title: "API key" + description: "The API key of the Trustpilot API application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + business_units: + type: "array" + items: + type: "string" + title: "Business Unit names" + description: + "The names of business units which shall be synchronized. Some\ + \ streams e.g. configured_business_units or private_reviews use this configuration." + examples: + - "mydomain.com" + - "www.mydomain.com" + start_date: + type: "string" + title: "Start Date" + description: + "For streams with sync. 
method incremental the start date time\ + \ to be used" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "%Y-%m-%dT%H:%M:%SZ" + sourceType: + title: "trustpilot" + const: "trustpilot" + enum: + - "trustpilot" + order: 0 + type: "string" + source-trustpilot-update: + title: "Trustpilot Spec" + type: "object" + required: + - "credentials" + - "business_units" + - "start_date" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth 2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "API key" + description: + "The API key of the Trustpilot API application. (represents\ + \ the OAuth Client ID)" + airbyte_secret: true + client_secret: + type: "string" + title: "Secret" + description: + "The Secret of the Trustpilot API application. (represents\ + \ the OAuth Client Secret)" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + title: "Token expiry date time" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - type: "object" + title: "API Key" + description: + "The API key authentication method gives you access to only\ + \ the streams which are part of the Public API. When you want to get\ + \ streams available via the Consumer API (e.g. the private reviews)\ + \ you need to use authentication method OAuth 2.0." 
+ required: + - "client_id" + properties: + auth_type: + type: "string" + const: "apikey" + enum: + - "apikey" + client_id: + type: "string" + title: "API key" + description: "The API key of the Trustpilot API application." + airbyte_secret: true + business_units: + type: "array" + items: + type: "string" + title: "Business Unit names" + description: + "The names of business units which shall be synchronized. Some\ + \ streams e.g. configured_business_units or private_reviews use this configuration." + examples: + - "mydomain.com" + - "www.mydomain.com" + start_date: + type: "string" + title: "Start Date" + description: + "For streams with sync. method incremental the start date time\ + \ to be used" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "%Y-%m-%dT%H:%M:%SZ" + source-google-webfonts: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + order: 0 + x-speakeasy-param-sensitive: true + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + order: 1 + prettyPrint: + type: "string" + description: "Optional, boolean type" + order: 2 + sort: + type: "string" + description: "Optional, to find how to sort" + order: 3 + sourceType: + title: "google-webfonts" + const: "google-webfonts" + enum: + - "google-webfonts" + order: 0 + type: "string" + source-google-webfonts-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + order: 0 + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + order: 1 + prettyPrint: + type: 
"string" + description: "Optional, boolean type" + order: 2 + sort: + type: "string" + description: "Optional, to find how to sort" + order: 3 + source-pypi: + type: "object" + required: + - "project_name" + - "sourceType" + properties: + version: + type: "string" + title: "Package Version" + description: + "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + order: 1 + project_name: + type: "string" + title: "PyPI Package" + description: + "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + order: 0 + sourceType: + title: "pypi" + const: "pypi" + enum: + - "pypi" + order: 0 + type: "string" + source-pypi-update: + type: "object" + required: + - "project_name" + properties: + version: + type: "string" + title: "Package Version" + description: + "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + order: 1 + project_name: + type: "string" + title: "PyPI Package" + description: + "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + order: 0 + source-slack: + title: "Slack Spec" + type: "object" + required: + - "start_date" + - "lookback_window" + - "join_channels" + - "sourceType" + properties: + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + title: "Start Date" + format: "date-time" + lookback_window: + type: "integer" + title: "Threads Lookback window (Days)" + description: + "How far into the past to look for messages in threads, default\ + \ is 0 days" + examples: + - 7 + - 14 + minimum: 0 + default: 0 + maximum: 365 + join_channels: + type: "boolean" + default: true + title: "Join all channels" + description: + "Whether to join all channels or to sync data only from channels\ + \ the bot is already in. If false, you'll need to manually add the bot\ + \ to all the channels from which you'd like to sync messages. " + include_private_channels: + type: "boolean" + default: false + title: "Include private channels" + description: + "Whether to read information from private channels that the\ + \ bot is already in. If false, only public channels will be read. If\ + \ true, the bot must be manually added to private channels. " + channel_filter: + type: "array" + default: [] + items: + type: "string" + minLength: 0 + title: "Channel name filter" + description: + "A channel name list (without leading '#' char) which limit\ + \ the channels from which you'd like to sync. Empty list means no filter." + examples: + - "channel_one" + - "channel_two" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate into Slack" + type: "object" + oneOf: + - type: "object" + title: "Sign in via Slack (OAuth)" + required: + - "option_title" + - "client_id" + - "client_secret" + - "access_token" + properties: + option_title: + type: "string" + const: "Default OAuth2.0 authorization" + enum: + - "Default OAuth2.0 authorization" + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access token" + description: + "Slack access_token. See our docs if you need help generating the token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + - type: "object" + title: "API Token" + required: + - "option_title" + - "api_token" + properties: + option_title: + type: "string" + const: "API Token Credentials" + enum: + - "API Token Credentials" + api_token: + type: "string" + title: "API Token" + description: + "A Slack bot token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 1 + sourceType: + title: "slack" + const: "slack" + enum: + - "slack" + order: 0 + type: "string" + source-slack-update: + title: "Slack Spec" + type: "object" + required: + - "start_date" + - "lookback_window" + - "join_channels" + properties: + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + title: "Start Date" + format: "date-time" + lookback_window: + type: "integer" + title: "Threads Lookback window (Days)" + description: + "How far into the past to look for messages in threads, default\ + \ is 0 days" + examples: + - 7 + - 14 + minimum: 0 + default: 0 + maximum: 365 + join_channels: + type: "boolean" + default: true + title: "Join all channels" + description: + "Whether to join all channels or to sync data only from channels\ + \ the bot is already in. If false, you'll need to manually add the bot\ + \ to all the channels from which you'd like to sync messages. " + include_private_channels: + type: "boolean" + default: false + title: "Include private channels" + description: + "Whether to read information from private channels that the\ + \ bot is already in. 
If false, only public channels will be read. If\ + \ true, the bot must be manually added to private channels. " + channel_filter: + type: "array" + default: [] + items: + type: "string" + minLength: 0 + title: "Channel name filter" + description: + "A channel name list (without leading '#' char) which limit\ + \ the channels from which you'd like to sync. Empty list means no filter." + examples: + - "channel_one" + - "channel_two" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate into Slack" + type: "object" + oneOf: + - type: "object" + title: "Sign in via Slack (OAuth)" + required: + - "option_title" + - "client_id" + - "client_secret" + - "access_token" + properties: + option_title: + type: "string" + const: "Default OAuth2.0 authorization" + enum: + - "Default OAuth2.0 authorization" + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." + airbyte_secret: true + access_token: + type: "string" + title: "Access token" + description: + "Slack access_token. See our docs if you need help generating the token." + airbyte_secret: true + order: 0 + - type: "object" + title: "API Token" + required: + - "option_title" + - "api_token" + properties: + option_title: + type: "string" + const: "API Token Credentials" + enum: + - "API Token Credentials" + api_token: + type: "string" + title: "API Token" + description: + "A Slack bot token. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + order: 1 + source-file: + title: "File Source Spec" + type: "object" + required: + - "dataset_name" + - "format" + - "url" + - "provider" + - "sourceType" + properties: + dataset_name: + type: "string" + title: "Dataset Name" + description: + "The Name of the final table to replicate this file into (should\ + \ include letters, numbers dash and underscores only)." + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "excel_binary" + - "fwf" + - "feather" + - "parquet" + - "yaml" + default: "csv" + title: "File Format" + description: + "The Format of the file which should be replicated (Warning:\ + \ some formats may be experimental, please refer to the docs)." + reader_options: + type: "string" + title: "Reader Options" + description: + "This should be a string in JSON format. It depends on the\ + \ chosen file format to provide additional options and tune its behavior." + examples: + - "{}" + - '{"sep": " "}' + - "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"\ + ] }" + url: + type: "string" + title: "URL" + description: "The URL path to access the file which should be replicated." + examples: + - "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" + - "gs://my-google-bucket/data.csv" + - "s3://gdelt-open-data/events/20190914.export.csv" + provider: + type: "object" + title: "Storage Provider" + description: + "The storage Provider or Location of the file(s) which should\ + \ be replicated." 
+ default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + const: "HTTPS" + enum: + - "HTTPS" + user_agent: + type: "boolean" + title: "User-Agent" + default: false + description: "Add User-Agent to request" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "GCS" + enum: + - "GCS" + service_account_json: + type: "string" + title: "Service Account JSON" + airbyte_secret: true + description: + "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + x-speakeasy-param-sensitive: true + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "S3" + enum: + - "S3" + aws_access_key_id: + type: "string" + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + title: "Storage" + const: "AzBlob" + enum: + - "AzBlob" + storage_account: + type: "string" + title: "Storage Account" + description: + "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + title: "SAS Token" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + x-speakeasy-param-sensitive: true + shared_key: + type: "string" + title: "Shared Key" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SSH" + enum: + - "SSH" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SCP" + enum: + - "SCP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SFTP" + enum: + - "SFTP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + description: + "WARNING: Note that the local storage URL available for\ + \ reading must start with the local mount \"/local/\" at the moment\ + \ until we implement more advanced docker mounting options." 
+ const: "local" + enum: + - "local" + sourceType: + title: "file" + const: "file" + enum: + - "file" + order: 0 + type: "string" + source-file-update: + title: "File Source Spec" + type: "object" + required: + - "dataset_name" + - "format" + - "url" + - "provider" + properties: + dataset_name: + type: "string" + title: "Dataset Name" + description: + "The Name of the final table to replicate this file into (should\ + \ include letters, numbers dash and underscores only)." + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "excel_binary" + - "fwf" + - "feather" + - "parquet" + - "yaml" + default: "csv" + title: "File Format" + description: + "The Format of the file which should be replicated (Warning:\ + \ some formats may be experimental, please refer to the docs)." + reader_options: + type: "string" + title: "Reader Options" + description: + "This should be a string in JSON format. It depends on the\ + \ chosen file format to provide additional options and tune its behavior." + examples: + - "{}" + - '{"sep": " "}' + - "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"\ + ] }" + url: + type: "string" + title: "URL" + description: "The URL path to access the file which should be replicated." + examples: + - "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" + - "gs://my-google-bucket/data.csv" + - "s3://gdelt-open-data/events/20190914.export.csv" + provider: + type: "object" + title: "Storage Provider" + description: + "The storage Provider or Location of the file(s) which should\ + \ be replicated." 
+ default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + const: "HTTPS" + enum: + - "HTTPS" + user_agent: + type: "boolean" + title: "User-Agent" + default: false + description: "Add User-Agent to request" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "GCS" + enum: + - "GCS" + service_account_json: + type: "string" + title: "Service Account JSON" + airbyte_secret: true + description: + "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "S3" + enum: + - "S3" + aws_access_key_id: + type: "string" + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." 
+ airbyte_secret: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + title: "Storage" + const: "AzBlob" + enum: + - "AzBlob" + storage_account: + type: "string" + title: "Storage Account" + description: + "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + title: "SAS Token" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + shared_key: + type: "string" + title: "Shared Key" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SSH" + enum: + - "SSH" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SCP" + enum: + - "SCP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SFTP" + enum: + - "SFTP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + description: + "WARNING: Note that the local storage URL available for\ + \ reading must start with the local mount \"/local/\" at the moment\ + \ until we implement more advanced docker mounting options." + const: "local" + enum: + - "local" + source-lokalise: + type: "object" + required: + - "api_key" + - "project_id" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Lokalise API Key with read-access. 
Available at Profile settings\ + \ > API tokens. See here." + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + project_id: + type: "string" + description: "Lokalise project ID. Available at Project Settings > General." + title: "Project Id" + order: 1 + sourceType: + title: "lokalise" + const: "lokalise" + enum: + - "lokalise" + order: 0 + type: "string" + source-lokalise-update: + type: "object" + required: + - "api_key" + - "project_id" + properties: + api_key: + type: "string" + description: + "Lokalise API Key with read-access. Available at Profile settings\ + \ > API tokens. See here." + title: "API Key" + airbyte_secret: true + order: 0 + project_id: + type: "string" + description: "Lokalise project ID. Available at Project Settings > General." + title: "Project Id" + order: 1 + source-zoho-crm: + title: "Zoho Crm Configuration" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "environment" + - "dc_region" + - "edition" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "OAuth2.0 Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "OAuth2.0 Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "OAuth2.0 Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + dc_region: + title: "Data Center Location" + type: "string" + description: + "Please choose the region of your Data Center location. 
More\ + \ info by this Link" + enum: + - "US" + - "AU" + - "EU" + - "IN" + - "CN" + - "JP" + environment: + title: "Environment" + type: "string" + description: "Please choose the environment" + enum: + - "Production" + - "Developer" + - "Sandbox" + start_datetime: + title: "Start Date" + type: + - "string" + - "null" + examples: + - "2000-01-01" + - "2000-01-01 13:00" + - "2000-01-01 13:00:00" + - "2000-01-01T13:00+00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`" + format: "date-time" + edition: + title: "Zoho CRM Edition" + type: "string" + description: + "Choose your Edition of Zoho CRM to determine API Concurrency\ + \ Limits" + enum: + - "Free" + - "Standard" + - "Professional" + - "Enterprise" + - "Ultimate" + default: "Free" + sourceType: + title: "zoho-crm" + const: "zoho-crm" + enum: + - "zoho-crm" + order: 0 + type: "string" + source-zoho-crm-update: + title: "Zoho Crm Configuration" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "environment" + - "dc_region" + - "edition" + properties: + client_id: + type: "string" + title: "Client ID" + description: "OAuth2.0 Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "OAuth2.0 Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "OAuth2.0 Refresh Token" + airbyte_secret: true + dc_region: + title: "Data Center Location" + type: "string" + description: + "Please choose the region of your Data Center location. 
More\ + \ info by this Link" + enum: + - "US" + - "AU" + - "EU" + - "IN" + - "CN" + - "JP" + environment: + title: "Environment" + type: "string" + description: "Please choose the environment" + enum: + - "Production" + - "Developer" + - "Sandbox" + start_datetime: + title: "Start Date" + type: + - "string" + - "null" + examples: + - "2000-01-01" + - "2000-01-01 13:00" + - "2000-01-01 13:00:00" + - "2000-01-01T13:00+00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`" + format: "date-time" + edition: + title: "Zoho CRM Edition" + type: "string" + description: + "Choose your Edition of Zoho CRM to determine API Concurrency\ + \ Limits" + enum: + - "Free" + - "Standard" + - "Professional" + - "Enterprise" + - "Ultimate" + default: "Free" + source-gainsight-px: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "The Aptrinsic API Key which is recieved from the dashboard\ + \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "gainsight-px" + const: "gainsight-px" + enum: + - "gainsight-px" + order: 0 + type: "string" + source-gainsight-px-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "The Aptrinsic API Key which is recieved from the dashboard\ + \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" + order: 0 + source-stripe: + title: "Stripe Source Spec" + type: "object" + required: + - "client_secret" + - "account_id" + - "sourceType" + properties: + account_id: + type: "string" + title: "Account ID" + description: + "Your Stripe account ID (starts with 'acct_', find yours here)." 
+ order: 0 + client_secret: + type: "string" + title: "Secret Key" + description: + "Stripe API key (usually starts with 'sk_live_'; find yours\ + \ here)." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ + \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window in days" + default: 0 + minimum: 0 + description: + "When set, the connector will always re-export data from the\ + \ past N days, where N is the value set here. This is useful if your data\ + \ is frequently updated after creation. The Lookback Window only applies\ + \ to streams that do not support event-based incremental syncs: Events,\ + \ SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks,\ + \ Refunds. More info here" + order: 3 + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 365 + minimum: 1 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + description: + "The time increment used by the connector when requesting data\ + \ from the Stripe API. The bigger the value is, the less requests will\ + \ be made and faster the sync will be. On the other hand, the more seldom\ + \ the state is persisted." + order: 4 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 20 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker thread to use for the sync. The performance\ + \ upper boundary depends on call_rate_limit setting and type of account." 
+ order: 5 + call_rate_limit: + type: "integer" + title: "Max number of API calls per second" + examples: + - 25 + - 100 + description: + "The number of API calls per second that you allow connector\ + \ to make. This value can not be bigger than real API call rate limit\ + \ (https://stripe.com/docs/rate-limits). If not specified the default\ + \ maximum is 25 and 100 calls per second for test and production tokens\ + \ respectively." + sourceType: + title: "stripe" + const: "stripe" + enum: + - "stripe" + order: 0 + type: "string" + source-stripe-update: + title: "Stripe Source Spec" + type: "object" + required: + - "client_secret" + - "account_id" + properties: + account_id: + type: "string" + title: "Account ID" + description: + "Your Stripe account ID (starts with 'acct_', find yours here)." + order: 0 + client_secret: + type: "string" + title: "Secret Key" + description: + "Stripe API key (usually starts with 'sk_live_'; find yours\ + \ here)." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ + \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window in days" + default: 0 + minimum: 0 + description: + "When set, the connector will always re-export data from the\ + \ past N days, where N is the value set here. This is useful if your data\ + \ is frequently updated after creation. The Lookback Window only applies\ + \ to streams that do not support event-based incremental syncs: Events,\ + \ SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks,\ + \ Refunds. 
More info here" + order: 3 + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 365 + minimum: 1 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + description: + "The time increment used by the connector when requesting data\ + \ from the Stripe API. The bigger the value is, the less requests will\ + \ be made and faster the sync will be. On the other hand, the more seldom\ + \ the state is persisted." + order: 4 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 20 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker thread to use for the sync. The performance\ + \ upper boundary depends on call_rate_limit setting and type of account." + order: 5 + call_rate_limit: + type: "integer" + title: "Max number of API calls per second" + examples: + - 25 + - 100 + description: + "The number of API calls per second that you allow connector\ + \ to make. This value can not be bigger than real API call rate limit\ + \ (https://stripe.com/docs/rate-limits). If not specified the default\ + \ maximum is 25 and 100 calls per second for test and production tokens\ + \ respectively." 
+ source-buzzsprout: + type: "object" + required: + - "api_key" + - "podcast_id" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + podcast_id: + type: "string" + description: "Podcast ID found in `https://www.buzzsprout.com/my/profile/api`" + title: "Podcast ID" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "buzzsprout" + const: "buzzsprout" + enum: + - "buzzsprout" + order: 0 + type: "string" + source-buzzsprout-update: + type: "object" + required: + - "api_key" + - "podcast_id" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + podcast_id: + type: "string" + description: "Podcast ID found in `https://www.buzzsprout.com/my/profile/api`" + title: "Podcast ID" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-youtube-analytics: + title: "YouTube Analytics Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: 
true + x-speakeasy-param-sensitive: true + sourceType: + title: "youtube-analytics" + const: "youtube-analytics" + enum: + - "youtube-analytics" + order: 0 + type: "string" + source-youtube-analytics-update: + title: "YouTube Analytics Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: true + source-google-sheets: + title: "Google Sheets Source Spec" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + - "sourceType" + properties: + batch_size: + type: "integer" + title: "Row Batch Size" + description: + "Default value is 200. An integer representing row batch size\ + \ for each sent request to Google Sheets API. Row batch size means how\ + \ many rows are processed from the google sheet, for example default value\ + \ 200 would process rows 1-201, then 201-401 and so on. Based on Google\ + \ Sheets API limits documentation, it is possible to send up to 300\ + \ requests per minute, but each individual request has to be processed\ + \ under 180 seconds, otherwise the request returns a timeout error. In\ + \ regards to this information, consider network speed and number of columns\ + \ of the google sheet when deciding a batch_size value. Default value\ + \ should cover most of the cases, but if a google sheet has over 100,000\ + \ records or more, consider increasing batch_size value." 
+ default: 200 + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "Enter the link to the Google spreadsheet you want to sync.\ + \ To copy the link, click the 'Share' button in the top-right corner of\ + \ the spreadsheet, then click 'Copy link'." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" + names_conversion: + type: "boolean" + title: "Convert Column Names to SQL-Compliant Format" + description: + "Enables the conversion of column names to a standardized,\ + \ SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this\ + \ option if your destination is SQL-based." + default: false + credentials: + type: "object" + title: "Authentication" + description: "Credentials for connecting to the Google Sheets API" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Enter your Google application's refresh token. See Google's\ + \ documentation for more information." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + service_account_info: + type: "string" + title: "Service Account Information." + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + x-speakeasy-param-sensitive: true + sourceType: + title: "google-sheets" + const: "google-sheets" + enum: + - "google-sheets" + order: 0 + type: "string" + source-google-sheets-update: + title: "Google Sheets Source Spec" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + properties: + batch_size: + type: "integer" + title: "Row Batch Size" + description: + "Default value is 200. An integer representing row batch size\ + \ for each sent request to Google Sheets API. Row batch size means how\ + \ many rows are processed from the google sheet, for example default value\ + \ 200 would process rows 1-201, then 201-401 and so on. Based on Google\ + \ Sheets API limits documentation, it is possible to send up to 300\ + \ requests per minute, but each individual request has to be processed\ + \ under 180 seconds, otherwise the request returns a timeout error. In\ + \ regards to this information, consider network speed and number of columns\ + \ of the google sheet when deciding a batch_size value. Default value\ + \ should cover most of the cases, but if a google sheet has over 100,000\ + \ records or more, consider increasing batch_size value." 
+ default: 200 + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "Enter the link to the Google spreadsheet you want to sync.\ + \ To copy the link, click the 'Share' button in the top-right corner of\ + \ the spreadsheet, then click 'Copy link'." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" + names_conversion: + type: "boolean" + title: "Convert Column Names to SQL-Compliant Format" + description: + "Enables the conversion of column names to a standardized,\ + \ SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this\ + \ option if your destination is SQL-based." + default: false + credentials: + type: "object" + title: "Authentication" + description: "Credentials for connecting to the Google Sheets API" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Enter your Google application's refresh token. See Google's\ + \ documentation for more information." + airbyte_secret: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + service_account_info: + type: "string" + title: "Service Account Information." 
+ description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + source-zendesk-talk: + type: "object" + title: "Source Zendesk Talk Spec" + required: + - "start_date" + - "subdomain" + - "sourceType" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: + "This is your Zendesk subdomain that can be found in your account\ + \ URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN\ + \ is the value of your subdomain." + credentials: + title: "Authentication" + type: "object" + order: 1 + description: + "Zendesk service provides two authentication methods. Choose\ + \ between: `OAuth2.0` or `API token`." + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the API token generated. See the docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "api_token" + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. 
See the docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated." + examples: + - "2020-10-15T00:00:00Z" + sourceType: + title: "zendesk-talk" + const: "zendesk-talk" + enum: + - "zendesk-talk" + order: 0 + type: "string" + source-zendesk-talk-update: + type: "object" + title: "Source Zendesk Talk Spec" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: + "This is your Zendesk subdomain that can be found in your account\ + \ URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN\ + \ is the value of your subdomain." + credentials: + title: "Authentication" + type: "object" + order: 1 + description: + "Zendesk service provides two authentication methods. Choose\ + \ between: `OAuth2.0` or `API token`." + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the API token generated. See the docs\ + \ for more information." 
The number of requests per minute that this source is allowed
The number of requests per minute that this source is allowed
Choose how to authenticate to Asana
Choose how to authenticate to Asana
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + api_key: + type: "string" + airbyte_secret: true + title: "API Key" + description: + "API Key. See the docs for information on how to generate this key." + x-speakeasy-param-sensitive: true + base_url: + type: "string" + default: "https://app.posthog.com" + title: "Base URL" + description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." + examples: + - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of events\ + \ stream." + examples: + - 30 + - 10 + - 5 + sourceType: + title: "posthog" + const: "posthog" + enum: + - "posthog" + order: 0 + type: "string" + source-posthog-update: + title: "PostHog Spec" + type: "object" + required: + - "api_key" + - "start_date" + properties: + start_date: + title: "Start Date" + type: "string" + description: + "The date from which you'd like to replicate the data. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + api_key: + type: "string" + airbyte_secret: true + title: "API Key" + description: + "API Key. See the docs for information on how to generate this key." + base_url: + type: "string" + default: "https://app.posthog.com" + title: "Base URL" + description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." + examples: + - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of events\ + \ stream." 
+ examples: + - 30 + - 10 + - 5 + source-split-io: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "split-io" + const: "split-io" + enum: + - "split-io" + order: 0 + type: "string" + source-split-io-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-getlago: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_url: + type: "string" + description: "Your Lago API URL" + title: "API Url" + default: "https://api.getlago.com/api/v1" + order: 0 + api_key: + type: "string" + description: + "Your API Key. See here." + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "getlago" + const: "getlago" + enum: + - "getlago" + order: 0 + type: "string" + source-getlago-update: + type: "object" + required: + - "api_key" + properties: + api_url: + type: "string" + description: "Your Lago API URL" + title: "API Url" + default: "https://api.getlago.com/api/v1" + order: 0 + api_key: + type: "string" + description: + "Your API Key. See here." 
+ title: "API Key" + airbyte_secret: true + order: 1 + source-gridly: + title: "Gridly Spec" + type: "object" + required: + - "api_key" + - "grid_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + grid_id: + type: "string" + title: "Grid ID" + description: "ID of a grid, or can be ID of a branch" + sourceType: + title: "gridly" + const: "gridly" + enum: + - "gridly" + order: 0 + type: "string" + source-gridly-update: + title: "Gridly Spec" + type: "object" + required: + - "api_key" + - "grid_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + grid_id: + type: "string" + title: "Grid ID" + description: "ID of a grid, or can be ID of a branch" + source-microsoft-teams: + title: "Microsoft Teams Spec" + type: "object" + required: + - "period" + - "sourceType" + properties: + period: + type: "string" + title: "Period" + description: + "Specifies the length of time over which the Team Device Report\ + \ stream is aggregated. The supported values are: D7, D30, D90, and D180." + examples: + - "D7" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Microsoft" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Microsoft (OAuth 2.0)" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. 
grab the tenant ID from
grab the tenant ID from
grab the tenant ID from
airbyte.cloud.looker.com,looker.[clientname].com,IP\ + \ address" + run_look_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + order: 3 + title: "Look IDs to Run" + description: "The IDs of any Looks to run" + sourceType: + title: "looker" + const: "looker" + enum: + - "looker" + order: 0 + type: "string" + source-looker-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "domain" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + description: + "The Client ID is first part of an API3 key that is specific\ + \ to each Looker user. See the docs for more information on how to generate this key." + client_secret: + type: "string" + order: 1 + title: "Client Secret" + description: "The Client Secret is second part of an API3 key." + airbyte_secret: true + domain: + type: "string" + order: 2 + title: "Domain" + examples: + - "domainname.looker.com" + - "looker.clientname.com" + - "123.123.124.123:8000" + description: + "Domain for your Looker account, e.g. airbyte.cloud.looker.com,looker.[clientname].com,IP\ + \ address" + run_look_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + order: 3 + title: "Look IDs to Run" + description: "The IDs of any Looks to run" + source-dropbox-sign: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. 
Find it at https://app.hellosign.com/home/myAccount#api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "dropbox-sign" + const: "dropbox-sign" + enum: + - "dropbox-sign" + order: 0 + type: "string" + source-dropbox-sign-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://app.hellosign.com/home/myAccount#api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-google-tasks: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + records_limit: + type: "string" + description: "The maximum number of records to be returned per request" + order: 0 + title: "Records Limit" + default: "50" + sourceType: + title: "google-tasks" + const: "google-tasks" + enum: + - "google-tasks" + order: 0 + type: "string" + source-google-tasks-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + records_limit: + type: "string" + description: "The maximum number of records to be returned per request" + 
order: 0 + title: "Records Limit" + default: "50" + source-amazon-seller-partner: + title: "Amazon Seller Partner Spec" + type: "object" + required: + - "aws_environment" + - "region" + - "account_type" + - "lwa_app_id" + - "lwa_client_secret" + - "refresh_token" + - "sourceType" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + aws_environment: + title: "AWS Environment" + description: "Select the AWS Environment." + enum: + - "PRODUCTION" + - "SANDBOX" + default: "PRODUCTION" + type: "string" + order: 1 + region: + title: "AWS Region" + description: "Select the AWS Region." + enum: + - "AE" + - "AU" + - "BE" + - "BR" + - "CA" + - "DE" + - "EG" + - "ES" + - "FR" + - "GB" + - "IN" + - "IT" + - "JP" + - "MX" + - "NL" + - "PL" + - "SA" + - "SE" + - "SG" + - "TR" + - "UK" + - "US" + default: "US" + type: "string" + order: 2 + account_type: + title: "AWS Seller Partner Account Type" + description: + "Type of the Account you're going to authorize the Airbyte\ + \ application by" + enum: + - "Seller" + - "Vendor" + default: "Seller" + type: "string" + order: 3 + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "The Refresh Token obtained via OAuth flow authorization." + airbyte_secret: true + order: 6 + type: "string" + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
If start date is not provided\ + \ or older than 2 years ago from today, the date 2 years ago from today\ + \ will be used." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + order: 7 + type: "string" + format: "date-time" + replication_end_date: + title: "End Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data after this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$" + examples: + - "2017-01-25T00:00:00Z" + order: 8 + type: "string" + format: "date-time" + period_in_days: + title: "Period In Days" + type: "integer" + description: + "For syncs spanning a large date range, this option is used\ + \ to request data in a smaller fixed window to improve sync reliability.\ + \ This time window can be configured granularly by day." + default: 90 + minimum: 1 + order: 9 + report_options_list: + title: "Report Options" + description: + "Additional information passed to reports. This varies by report\ + \ type." 
+ order: 10 + type: "array" + items: + type: "object" + title: "Report Options" + required: + - "report_name" + - "stream_name" + - "options_list" + properties: + report_name: + title: "Report Name" + type: "string" + order: 0 + enum: + - "GET_AFN_INVENTORY_DATA" + - "GET_AFN_INVENTORY_DATA_BY_COUNTRY" + - "GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL" + - "GET_FBA_ESTIMATED_FBA_FEES_TXT_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_PROMOTION_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_REPLACEMENT_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_ORDER_DETAIL_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_SHIPMENT_DETAIL_DATA" + - "GET_FBA_INVENTORY_PLANNING_DATA" + - "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA" + - "GET_FBA_REIMBURSEMENTS_DATA" + - "GET_FBA_SNS_FORECAST_DATA" + - "GET_FBA_SNS_PERFORMANCE_DATA" + - "GET_FBA_STORAGE_FEE_CHARGES_DATA" + - "GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_FLAT_FILE_ARCHIVED_ORDERS_DATA_BY_ORDER_DATE" + - "GET_FLAT_FILE_OPEN_LISTINGS_DATA" + - "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE" + - "GET_LEDGER_DETAIL_VIEW_DATA" + - "GET_LEDGER_SUMMARY_VIEW_DATA" + - "GET_MERCHANT_CANCELLED_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_ALL_DATA" + - "GET_MERCHANT_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT" + - "GET_MERCHANT_LISTINGS_INACTIVE_DATA" + - "GET_MERCHANTS_LISTINGS_FYP_REPORT" + - "GET_ORDER_REPORT_DATA_SHIPPING" + - "GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT" + - "GET_SELLER_FEEDBACK_DATA" + - "GET_STRANDED_INVENTORY_UI_DATA" + - "GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE" + - "GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_XML_BROWSE_TREE_DATA" + - "GET_VENDOR_REAL_TIME_INVENTORY_REPORT" + - "GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT" + - "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + - "GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT" + - 
"GET_SALES_AND_TRAFFIC_REPORT" + - "GET_VENDOR_SALES_REPORT" + - "GET_VENDOR_INVENTORY_REPORT" + - "GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT" + - "GET_VENDOR_TRAFFIC_REPORT" + stream_name: + title: "Stream Name" + type: "string" + order: 1 + options_list: + title: "List of options" + description: "List of options" + type: "array" + order: 2 + items: + type: "object" + required: + - "option_name" + - "option_value" + properties: + option_name: + title: "Name" + type: "string" + order: 0 + option_value: + title: "Value" + type: "string" + order: 1 + wait_to_avoid_fatal_errors: + title: "Wait between requests to avoid fatal statuses in reports" + type: "boolean" + description: + "For report based streams with known amount of requests per\ + \ time period, this option will use waiting time between requests to avoid\ + \ fatal statuses in reports. See Troubleshooting section for more details" + default: false + order: 11 + sourceType: + title: "amazon-seller-partner" + const: "amazon-seller-partner" + enum: + - "amazon-seller-partner" + order: 0 + type: "string" + source-amazon-seller-partner-update: + title: "Amazon Seller Partner Spec" + type: "object" + required: + - "aws_environment" + - "region" + - "account_type" + - "lwa_app_id" + - "lwa_client_secret" + - "refresh_token" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + aws_environment: + title: "AWS Environment" + description: "Select the AWS Environment." + enum: + - "PRODUCTION" + - "SANDBOX" + default: "PRODUCTION" + type: "string" + order: 1 + region: + title: "AWS Region" + description: "Select the AWS Region." 
+ enum: + - "AE" + - "AU" + - "BE" + - "BR" + - "CA" + - "DE" + - "EG" + - "ES" + - "FR" + - "GB" + - "IN" + - "IT" + - "JP" + - "MX" + - "NL" + - "PL" + - "SA" + - "SE" + - "SG" + - "TR" + - "UK" + - "US" + default: "US" + type: "string" + order: 2 + account_type: + title: "AWS Seller Partner Account Type" + description: + "Type of the Account you're going to authorize the Airbyte\ + \ application by" + enum: + - "Seller" + - "Vendor" + default: "Seller" + type: "string" + order: 3 + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + refresh_token: + title: "Refresh Token" + description: "The Refresh Token obtained via OAuth flow authorization." + airbyte_secret: true + order: 6 + type: "string" + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If start date is not provided\ + \ or older than 2 years ago from today, the date 2 years ago from today\ + \ will be used." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + order: 7 + type: "string" + format: "date-time" + replication_end_date: + title: "End Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data after this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$" + examples: + - "2017-01-25T00:00:00Z" + order: 8 + type: "string" + format: "date-time" + period_in_days: + title: "Period In Days" + type: "integer" + description: + "For syncs spanning a large date range, this option is used\ + \ to request data in a smaller fixed window to improve sync reliability.\ + \ This time window can be configured granularly by day." + default: 90 + minimum: 1 + order: 9 + report_options_list: + title: "Report Options" + description: + "Additional information passed to reports. This varies by report\ + \ type." + order: 10 + type: "array" + items: + type: "object" + title: "Report Options" + required: + - "report_name" + - "stream_name" + - "options_list" + properties: + report_name: + title: "Report Name" + type: "string" + order: 0 + enum: + - "GET_AFN_INVENTORY_DATA" + - "GET_AFN_INVENTORY_DATA_BY_COUNTRY" + - "GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL" + - "GET_FBA_ESTIMATED_FBA_FEES_TXT_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_PROMOTION_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_REPLACEMENT_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_ORDER_DETAIL_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_SHIPMENT_DETAIL_DATA" + - "GET_FBA_INVENTORY_PLANNING_DATA" + - "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA" + - "GET_FBA_REIMBURSEMENTS_DATA" + - "GET_FBA_SNS_FORECAST_DATA" + - "GET_FBA_SNS_PERFORMANCE_DATA" + - "GET_FBA_STORAGE_FEE_CHARGES_DATA" + - "GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_FLAT_FILE_ARCHIVED_ORDERS_DATA_BY_ORDER_DATE" + - "GET_FLAT_FILE_OPEN_LISTINGS_DATA" + - "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE" + - "GET_LEDGER_DETAIL_VIEW_DATA" + - "GET_LEDGER_SUMMARY_VIEW_DATA" + - "GET_MERCHANT_CANCELLED_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_ALL_DATA" + - 
"GET_MERCHANT_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT" + - "GET_MERCHANT_LISTINGS_INACTIVE_DATA" + - "GET_MERCHANTS_LISTINGS_FYP_REPORT" + - "GET_ORDER_REPORT_DATA_SHIPPING" + - "GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT" + - "GET_SELLER_FEEDBACK_DATA" + - "GET_STRANDED_INVENTORY_UI_DATA" + - "GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE" + - "GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_XML_BROWSE_TREE_DATA" + - "GET_VENDOR_REAL_TIME_INVENTORY_REPORT" + - "GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT" + - "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + - "GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT" + - "GET_SALES_AND_TRAFFIC_REPORT" + - "GET_VENDOR_SALES_REPORT" + - "GET_VENDOR_INVENTORY_REPORT" + - "GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT" + - "GET_VENDOR_TRAFFIC_REPORT" + stream_name: + title: "Stream Name" + type: "string" + order: 1 + options_list: + title: "List of options" + description: "List of options" + type: "array" + order: 2 + items: + type: "object" + required: + - "option_name" + - "option_value" + properties: + option_name: + title: "Name" + type: "string" + order: 0 + option_value: + title: "Value" + type: "string" + order: 1 + wait_to_avoid_fatal_errors: + title: "Wait between requests to avoid fatal statuses in reports" + type: "boolean" + description: + "For report based streams with known amount of requests per\ + \ time period, this option will use waiting time between requests to avoid\ + \ fatal statuses in reports. 
See Troubleshooting section for more details" + default: false + order: 11 + source-northpass-lms: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "northpass-lms" + const: "northpass-lms" + enum: + - "northpass-lms" + order: 0 + type: "string" + source-northpass-lms-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + source-recreation: + type: "object" + required: + - "apikey" + - "sourceType" + properties: + apikey: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + query_campsites: + type: "string" + title: "Query Campsite" + order: 1 + sourceType: + title: "recreation" + const: "recreation" + enum: + - "recreation" + order: 0 + type: "string" + source-recreation-update: + type: "object" + required: + - "apikey" + properties: + apikey: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + query_campsites: + type: "string" + title: "Query Campsite" + order: 1 + source-breezy-hr: + type: "object" + required: + - "api_key" + - "company_id" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + company_id: + type: "string" + order: 1 + title: "Company ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "breezy-hr" + const: "breezy-hr" + enum: + - "breezy-hr" + order: 0 + type: "string" + source-breezy-hr-update: + type: "object" + required: + - "api_key" + - "company_id" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + company_id: + type: "string" + order: 1 + title: "Company ID" + airbyte_secret: true + 
source-linkedin-ads: + title: "Linkedin Ads Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Access Token" + type: "object" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + examples: + - "2021-05-17" + format: "date" + lookback_window: + type: "integer" + title: "Lookback Window" + default: 0 + minimum: 0 + description: "How far into the past to look for records. 
(in days)" + account_ids: + title: "Account IDs" + type: "array" + description: + "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." + items: + type: "integer" + examples: + - 123456789 + default: [] + ad_analytics_reports: + title: "Custom Ad Analytics Reports" + type: "array" + items: + type: "object" + title: "Ad Analytics Report Configuration" + description: "Config for custom ad Analytics Report" + required: + - "name" + - "pivot_by" + - "time_granularity" + properties: + name: + title: "Report Name" + description: "The name for the custom report." + type: "string" + pivot_by: + title: "Pivot Category" + description: + "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." + type: "string" + enum: + - "COMPANY" + - "ACCOUNT" + - "SHARE" + - "CAMPAIGN" + - "CREATIVE" + - "CAMPAIGN_GROUP" + - "CONVERSION" + - "CONVERSATION_NODE" + - "CONVERSATION_NODE_OPTION_INDEX" + - "SERVING_LOCATION" + - "CARD_INDEX" + - "MEMBER_COMPANY_SIZE" + - "MEMBER_INDUSTRY" + - "MEMBER_SENIORITY" + - "MEMBER_JOB_TITLE" + - "MEMBER_JOB_FUNCTION" + - "MEMBER_COUNTRY_V2" + - "MEMBER_REGION_V2" + - "MEMBER_COMPANY" + - "PLACEMENT_NAME" + - "IMPRESSION_DEVICE_TYPE" + time_granularity: + title: "Time Granularity" + description: + "Choose how to group the data in your report by time.\ + \ The options are:
    - 'ALL': A single result summarizing the entire\ + \ time range.
    - 'DAILY': Group results by each day.
    - 'MONTHLY':\ + \ Group results by each month.
    - 'YEARLY': Group results by each\ + \ year.
    Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." + type: "string" + enum: + - "ALL" + - "DAILY" + - "MONTHLY" + - "YEARLY" + default: [] + sourceType: + title: "linkedin-ads" + const: "linkedin-ads" + enum: + - "linkedin-ads" + order: 0 + type: "string" + source-linkedin-ads-update: + title: "Linkedin Ads Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + - title: "Access Token" + type: "object" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." 
+ examples: + - "2021-05-17" + format: "date" + lookback_window: + type: "integer" + title: "Lookback Window" + default: 0 + minimum: 0 + description: "How far into the past to look for records. (in days)" + account_ids: + title: "Account IDs" + type: "array" + description: + "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." + items: + type: "integer" + examples: + - 123456789 + default: [] + ad_analytics_reports: + title: "Custom Ad Analytics Reports" + type: "array" + items: + type: "object" + title: "Ad Analytics Report Configuration" + description: "Config for custom ad Analytics Report" + required: + - "name" + - "pivot_by" + - "time_granularity" + properties: + name: + title: "Report Name" + description: "The name for the custom report." + type: "string" + pivot_by: + title: "Pivot Category" + description: + "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." + type: "string" + enum: + - "COMPANY" + - "ACCOUNT" + - "SHARE" + - "CAMPAIGN" + - "CREATIVE" + - "CAMPAIGN_GROUP" + - "CONVERSION" + - "CONVERSATION_NODE" + - "CONVERSATION_NODE_OPTION_INDEX" + - "SERVING_LOCATION" + - "CARD_INDEX" + - "MEMBER_COMPANY_SIZE" + - "MEMBER_INDUSTRY" + - "MEMBER_SENIORITY" + - "MEMBER_JOB_TITLE" + - "MEMBER_JOB_FUNCTION" + - "MEMBER_COUNTRY_V2" + - "MEMBER_REGION_V2" + - "MEMBER_COMPANY" + - "PLACEMENT_NAME" + - "IMPRESSION_DEVICE_TYPE" + time_granularity: + title: "Time Granularity" + description: + "Choose how to group the data in your report by time.\ + \ The options are:
    - 'ALL': A single result summarizing the entire\ + \ time range.
    - 'DAILY': Group results by each day.
    - 'MONTHLY':\ + \ Group results by each month.
    - 'YEARLY': Group results by each\ + \ year.
    Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." + type: "string" + enum: + - "ALL" + - "DAILY" + - "MONTHLY" + - "YEARLY" + default: [] + source-us-census: + type: "object" + required: + - "query_path" + - "api_key" + - "sourceType" + properties: + query_params: + type: "string" + description: + "The query parameters portion of the GET request, without the\ + \ api key" + order: 0 + pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$" + examples: + - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" + - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path: + type: "string" + description: "The path portion of the GET request" + order: 1 + pattern: "^data(\\/[\\w\\d]+)+$" + examples: + - "data/2019/cbp" + - "data/2018/acs" + - "data/timeseries/healthins/sahie" + api_key: + type: "string" + description: + "Your API Key. Get your key here." + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "us-census" + const: "us-census" + enum: + - "us-census" + order: 0 + type: "string" + source-us-census-update: + type: "object" + required: + - "query_path" + - "api_key" + properties: + query_params: + type: "string" + description: + "The query parameters portion of the GET request, without the\ + \ api key" + order: 0 + pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$" + examples: + - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" + - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path: + type: "string" + description: "The path portion of the GET request" + order: 1 + pattern: "^data(\\/[\\w\\d]+)+$" + examples: + - "data/2019/cbp" + - "data/2018/acs" + - "data/timeseries/healthins/sahie" + api_key: + type: "string" + description: + "Your API Key. 
Get your key here." + order: 2 + airbyte_secret: true + source-goldcast: + title: "goldcast.io Source Spec" + type: "object" + required: + - "access_key" + - "sourceType" + properties: + access_key: + type: "string" + description: + "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "goldcast" + const: "goldcast" + enum: + - "goldcast" + order: 0 + type: "string" + source-goldcast-update: + title: "goldcast.io Source Spec" + type: "object" + required: + - "access_key" + properties: + access_key: + type: "string" + description: + "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + source-pinterest: + title: "Pinterest Spec" + type: "object" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set a date,\ + \ it would be defaulted to latest allowed date by api (89 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + status: + title: "Status" + description: + "For the ads, ad_groups, and campaigns streams, specifying\ + \ a status will filter out records that do not match the specified ones.\ + \ If a status is not specified, the source will default to records with\ + \ a status of either ACTIVE or PAUSED." 
+ type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true + credentials: + title: "OAuth2.0" + type: "object" + required: + - "auth_method" + - "refresh_token" + - "client_id" + - "client_secret" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to obtain new Access Token, when it's expired." + airbyte_secret: true + x-speakeasy-param-sensitive: true + custom_reports: + title: "Custom Reports" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." 
+ type: "array" + items: + title: "ReportConfig" + description: "Config for custom report" + type: "object" + required: + - "name" + - "level" + - "granularity" + - "columns" + properties: + name: + title: "Name" + description: "The name value of report" + type: "string" + order: 0 + level: + title: "Level" + description: "Chosen level for API" + default: "ADVERTISER" + enum: + - "ADVERTISER" + - "ADVERTISER_TARGETING" + - "CAMPAIGN" + - "CAMPAIGN_TARGETING" + - "AD_GROUP" + - "AD_GROUP_TARGETING" + - "PIN_PROMOTION" + - "PIN_PROMOTION_TARGETING" + - "KEYWORD" + - "PRODUCT_GROUP" + - "PRODUCT_GROUP_TARGETING" + - "PRODUCT_ITEM" + type: "string" + order: 1 + granularity: + title: "Granularity" + description: "Chosen granularity for API" + default: "TOTAL" + enum: + - "TOTAL" + - "DAY" + - "HOUR" + - "WEEK" + - "MONTH" + type: "string" + order: 2 + columns: + title: "Columns" + description: "A list of chosen columns" + default: [] + type: "array" + order: 3 + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "ADVERTISER_ID" + - "AD_ACCOUNT_ID" + - "AD_GROUP_ENTITY_STATUS" + - "AD_GROUP_ID" + - "AD_ID" + - "CAMPAIGN_DAILY_SPEND_CAP" + - "CAMPAIGN_ENTITY_STATUS" + - "CAMPAIGN_ID" + - "CAMPAIGN_LIFETIME_SPEND_CAP" + - "CAMPAIGN_NAME" + - "CHECKOUT_ROAS" + - "CLICKTHROUGH_1" + - "CLICKTHROUGH_1_GROSS" + - "CLICKTHROUGH_2" + - "CPC_IN_MICRO_DOLLAR" + - "CPM_IN_DOLLAR" + - "CPM_IN_MICRO_DOLLAR" + - "CTR" + - "CTR_2" + - "ECPCV_IN_DOLLAR" + - "ECPCV_P95_IN_DOLLAR" + - "ECPC_IN_DOLLAR" + - "ECPC_IN_MICRO_DOLLAR" + - "ECPE_IN_DOLLAR" + - "ECPM_IN_MICRO_DOLLAR" + - "ECPV_IN_DOLLAR" + - "ECTR" + - "EENGAGEMENT_RATE" + - "ENGAGEMENT_1" + - "ENGAGEMENT_2" + - "ENGAGEMENT_RATE" + - "IDEA_PIN_PRODUCT_TAG_VISIT_1" + - "IDEA_PIN_PRODUCT_TAG_VISIT_2" + - "IMPRESSION_1" + - "IMPRESSION_1_GROSS" + - "IMPRESSION_2" + - "INAPP_CHECKOUT_COST_PER_ACTION" + - "OUTBOUND_CLICK_1" + - "OUTBOUND_CLICK_2" + - "PAGE_VISIT_COST_PER_ACTION" + - "PAGE_VISIT_ROAS" + - "PAID_IMPRESSION" + - "PIN_ID" + - "PIN_PROMOTION_ID" + - "REPIN_1" + - "REPIN_2" + - "REPIN_RATE" + - "SPEND_IN_DOLLAR" + - "SPEND_IN_MICRO_DOLLAR" + - "TOTAL_CHECKOUT" + - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICKTHROUGH" + - "TOTAL_CLICK_ADD_TO_CART" + - "TOTAL_CLICK_CHECKOUT" + - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICK_LEAD" + - "TOTAL_CLICK_SIGNUP" + - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CONVERSIONS" + - "TOTAL_CUSTOM" + - "TOTAL_ENGAGEMENT" + - "TOTAL_ENGAGEMENT_CHECKOUT" + - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_ENGAGEMENT_LEAD" + - "TOTAL_ENGAGEMENT_SIGNUP" + - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT" + - "TOTAL_IMPRESSION_FREQUENCY" + - "TOTAL_IMPRESSION_USER" + - "TOTAL_LEAD" + - "TOTAL_OFFLINE_CHECKOUT" + - "TOTAL_PAGE_VISIT" + - "TOTAL_REPIN_RATE" + - "TOTAL_SIGNUP" + - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIDEO_3SEC_VIEWS" + - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND" + - 
"TOTAL_VIDEO_MRC_VIEWS" + - "TOTAL_VIDEO_P0_COMBINED" + - "TOTAL_VIDEO_P100_COMPLETE" + - "TOTAL_VIDEO_P25_COMBINED" + - "TOTAL_VIDEO_P50_COMBINED" + - "TOTAL_VIDEO_P75_COMBINED" + - "TOTAL_VIDEO_P95_COMBINED" + - "TOTAL_VIEW_ADD_TO_CART" + - "TOTAL_VIEW_CHECKOUT" + - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIEW_LEAD" + - "TOTAL_VIEW_SIGNUP" + - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CHECKOUT" + - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CLICK_CHECKOUT" + - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_SESSIONS" + - "TOTAL_WEB_VIEW_CHECKOUT" + - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "VIDEO_3SEC_VIEWS_2" + - "VIDEO_LENGTH" + - "VIDEO_MRC_VIEWS_2" + - "VIDEO_P0_COMBINED_2" + - "VIDEO_P100_COMPLETE_2" + - "VIDEO_P25_COMBINED_2" + - "VIDEO_P50_COMBINED_2" + - "VIDEO_P75_COMBINED_2" + - "VIDEO_P95_COMBINED_2" + - "WEB_CHECKOUT_COST_PER_ACTION" + - "WEB_CHECKOUT_ROAS" + - "WEB_SESSIONS_1" + - "WEB_SESSIONS_2" + click_window_days: + title: "Click window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a pin click action." + default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 4 + engagement_window_days: + title: "Engagement window days" + description: + "Number of days to use as the conversion attribution\ + \ window for an engagement action." + default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 5 + view_window_days: + title: "View window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a view action." 
+ default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 6 + conversion_report_time: + title: "Conversion report time" + description: + "The date by which the conversion metrics returned from\ + \ this endpoint will be reported. There are two dates associated\ + \ with a conversion event: the date that the user interacted with\ + \ the ad, and the date that the user completed a conversion event.." + default: "TIME_OF_AD_ACTION" + enum: + - "TIME_OF_AD_ACTION" + - "TIME_OF_CONVERSION" + type: "string" + order: 7 + attribution_types: + title: "Attribution types" + description: "List of types of attribution for the conversion report" + default: + - "INDIVIDUAL" + - "HOUSEHOLD" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." + enum: + - "INDIVIDUAL" + - "HOUSEHOLD" + order: 8 + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set\ + \ a date, it would be defaulted to latest allowed date by report\ + \ api (913 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + order: 9 + sourceType: + title: "pinterest" + const: "pinterest" + enum: + - "pinterest" + order: 0 + type: "string" + source-pinterest-update: + title: "Pinterest Spec" + type: "object" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set a date,\ + \ it would be defaulted to latest allowed date by api (89 days from today)." 
+ format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + status: + title: "Status" + description: + "For the ads, ad_groups, and campaigns streams, specifying\ + \ a status will filter out records that do not match the specified ones.\ + \ If a status is not specified, the source will default to records with\ + \ a status of either ACTIVE or PAUSED." + type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true + credentials: + title: "OAuth2.0" + type: "object" + required: + - "auth_method" + - "refresh_token" + - "client_id" + - "client_secret" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to obtain new Access Token, when it's expired." + airbyte_secret: true + custom_reports: + title: "Custom Reports" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." 
+ type: "array" + items: + title: "ReportConfig" + description: "Config for custom report" + type: "object" + required: + - "name" + - "level" + - "granularity" + - "columns" + properties: + name: + title: "Name" + description: "The name value of report" + type: "string" + order: 0 + level: + title: "Level" + description: "Chosen level for API" + default: "ADVERTISER" + enum: + - "ADVERTISER" + - "ADVERTISER_TARGETING" + - "CAMPAIGN" + - "CAMPAIGN_TARGETING" + - "AD_GROUP" + - "AD_GROUP_TARGETING" + - "PIN_PROMOTION" + - "PIN_PROMOTION_TARGETING" + - "KEYWORD" + - "PRODUCT_GROUP" + - "PRODUCT_GROUP_TARGETING" + - "PRODUCT_ITEM" + type: "string" + order: 1 + granularity: + title: "Granularity" + description: "Chosen granularity for API" + default: "TOTAL" + enum: + - "TOTAL" + - "DAY" + - "HOUR" + - "WEEK" + - "MONTH" + type: "string" + order: 2 + columns: + title: "Columns" + description: "A list of chosen columns" + default: [] + type: "array" + order: 3 + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "ADVERTISER_ID" + - "AD_ACCOUNT_ID" + - "AD_GROUP_ENTITY_STATUS" + - "AD_GROUP_ID" + - "AD_ID" + - "CAMPAIGN_DAILY_SPEND_CAP" + - "CAMPAIGN_ENTITY_STATUS" + - "CAMPAIGN_ID" + - "CAMPAIGN_LIFETIME_SPEND_CAP" + - "CAMPAIGN_NAME" + - "CHECKOUT_ROAS" + - "CLICKTHROUGH_1" + - "CLICKTHROUGH_1_GROSS" + - "CLICKTHROUGH_2" + - "CPC_IN_MICRO_DOLLAR" + - "CPM_IN_DOLLAR" + - "CPM_IN_MICRO_DOLLAR" + - "CTR" + - "CTR_2" + - "ECPCV_IN_DOLLAR" + - "ECPCV_P95_IN_DOLLAR" + - "ECPC_IN_DOLLAR" + - "ECPC_IN_MICRO_DOLLAR" + - "ECPE_IN_DOLLAR" + - "ECPM_IN_MICRO_DOLLAR" + - "ECPV_IN_DOLLAR" + - "ECTR" + - "EENGAGEMENT_RATE" + - "ENGAGEMENT_1" + - "ENGAGEMENT_2" + - "ENGAGEMENT_RATE" + - "IDEA_PIN_PRODUCT_TAG_VISIT_1" + - "IDEA_PIN_PRODUCT_TAG_VISIT_2" + - "IMPRESSION_1" + - "IMPRESSION_1_GROSS" + - "IMPRESSION_2" + - "INAPP_CHECKOUT_COST_PER_ACTION" + - "OUTBOUND_CLICK_1" + - "OUTBOUND_CLICK_2" + - "PAGE_VISIT_COST_PER_ACTION" + - "PAGE_VISIT_ROAS" + - "PAID_IMPRESSION" + - "PIN_ID" + - "PIN_PROMOTION_ID" + - "REPIN_1" + - "REPIN_2" + - "REPIN_RATE" + - "SPEND_IN_DOLLAR" + - "SPEND_IN_MICRO_DOLLAR" + - "TOTAL_CHECKOUT" + - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICKTHROUGH" + - "TOTAL_CLICK_ADD_TO_CART" + - "TOTAL_CLICK_CHECKOUT" + - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICK_LEAD" + - "TOTAL_CLICK_SIGNUP" + - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CONVERSIONS" + - "TOTAL_CUSTOM" + - "TOTAL_ENGAGEMENT" + - "TOTAL_ENGAGEMENT_CHECKOUT" + - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_ENGAGEMENT_LEAD" + - "TOTAL_ENGAGEMENT_SIGNUP" + - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT" + - "TOTAL_IMPRESSION_FREQUENCY" + - "TOTAL_IMPRESSION_USER" + - "TOTAL_LEAD" + - "TOTAL_OFFLINE_CHECKOUT" + - "TOTAL_PAGE_VISIT" + - "TOTAL_REPIN_RATE" + - "TOTAL_SIGNUP" + - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIDEO_3SEC_VIEWS" + - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND" + - 
"TOTAL_VIDEO_MRC_VIEWS" + - "TOTAL_VIDEO_P0_COMBINED" + - "TOTAL_VIDEO_P100_COMPLETE" + - "TOTAL_VIDEO_P25_COMBINED" + - "TOTAL_VIDEO_P50_COMBINED" + - "TOTAL_VIDEO_P75_COMBINED" + - "TOTAL_VIDEO_P95_COMBINED" + - "TOTAL_VIEW_ADD_TO_CART" + - "TOTAL_VIEW_CHECKOUT" + - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIEW_LEAD" + - "TOTAL_VIEW_SIGNUP" + - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CHECKOUT" + - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CLICK_CHECKOUT" + - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_SESSIONS" + - "TOTAL_WEB_VIEW_CHECKOUT" + - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "VIDEO_3SEC_VIEWS_2" + - "VIDEO_LENGTH" + - "VIDEO_MRC_VIEWS_2" + - "VIDEO_P0_COMBINED_2" + - "VIDEO_P100_COMPLETE_2" + - "VIDEO_P25_COMBINED_2" + - "VIDEO_P50_COMBINED_2" + - "VIDEO_P75_COMBINED_2" + - "VIDEO_P95_COMBINED_2" + - "WEB_CHECKOUT_COST_PER_ACTION" + - "WEB_CHECKOUT_ROAS" + - "WEB_SESSIONS_1" + - "WEB_SESSIONS_2" + click_window_days: + title: "Click window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a pin click action." + default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 4 + engagement_window_days: + title: "Engagement window days" + description: + "Number of days to use as the conversion attribution\ + \ window for an engagement action." + default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 5 + view_window_days: + title: "View window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a view action." 
+ default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 6 + conversion_report_time: + title: "Conversion report time" + description: + "The date by which the conversion metrics returned from\ + \ this endpoint will be reported. There are two dates associated\ + \ with a conversion event: the date that the user interacted with\ + \ the ad, and the date that the user completed a conversion event." + default: "TIME_OF_AD_ACTION" + enum: + - "TIME_OF_AD_ACTION" + - "TIME_OF_CONVERSION" + type: "string" + order: 7 + attribution_types: + title: "Attribution types" + description: "List of types of attribution for the conversion report" + default: + - "INDIVIDUAL" + - "HOUSEHOLD" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." + enum: + - "INDIVIDUAL" + - "HOUSEHOLD" + order: 8 + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set\ + \ a date, it would be defaulted to latest allowed date by report\ + \ api (913 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + order: 9 + source-spacex-api: + type: "object" + required: + - "sourceType" + properties: + id: + type: "string" + title: "Unique ID for specific source target" + description: "Optional, For a specific ID" + order: 0 + options: + type: "string" + title: "Configuration options for endpoints" + description: + "Optional, Possible values for an endpoint. 
Example values for\ + \ launches-latest, upcoming, past" + order: 1 + sourceType: + title: "spacex-api" + const: "spacex-api" + enum: + - "spacex-api" + order: 0 + type: "string" + source-spacex-api-update: + type: "object" + required: [] + properties: + id: + type: "string" + title: "Unique ID for specific source target" + description: "Optional, For a specific ID" + order: 0 + options: + type: "string" + title: "Configuration options for endpoints" + description: + "Optional, Possible values for an endpoint. Example values for\ + \ launches-latest, upcoming, past" + order: 1 + source-bamboo-hr: + title: "Bamboo HR Spec" + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + description: "Api key of bamboo hr" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + order: 1 + title: "subdomain" + description: "Sub Domain of bamboo hr" + custom_reports_fields: + type: "string" + order: 2 + title: "custom_reports_fields" + description: "Comma-separated list of fields to include in custom reports." + custom_reports_include_default_fields: + title: "custom_reports_include_default_fields" + description: + "If true, the custom reports endpoint will include the default\ + \ fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names."
+ type: "boolean" + default: true + order: 3 + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "bamboo-hr" + const: "bamboo-hr" + enum: + - "bamboo-hr" + order: 0 + type: "string" + source-bamboo-hr-update: + title: "Bamboo HR Spec" + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + description: "Api key of bamboo hr" + airbyte_secret: true + subdomain: + type: "string" + order: 1 + title: "subdomain" + description: "Sub Domain of bamboo hr" + custom_reports_fields: + type: "string" + order: 2 + title: "custom_reports_fields" + description: "Comma-separated list of fields to include in custom reports." + custom_reports_include_default_fields: + title: "custom_reports_include_default_fields" + description: + "If true, the custom reports endpoint will include the default\ + \ fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names." 
+ type: "boolean" + default: true + order: 3 + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-freshchat: + type: "object" + required: + - "account_name" + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + account_name: + type: "string" + description: "The unique account name for your Freshchat instance" + name: "account_name" + order: 0 + title: "Account Name" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "freshchat" + const: "freshchat" + enum: + - "freshchat" + order: 0 + type: "string" + source-freshchat-update: + type: "object" + required: + - "account_name" + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + account_name: + type: "string" + description: "The unique account name for your Freshchat instance" + name: "account_name" + order: 0 + title: "Account Name" + airbyte_secret: false + source-okta: + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "OAuth 2.0 with private key" + required: + - "auth_type" + - "client_id" + - "key_id" + - "private_key" + - "scope" + properties: + auth_type: + type: "string" + const: "oauth2.0_private_key" + order: 0 + enum: + - "oauth2.0_private_key" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + key_id: + type: "string" + title: "Key ID" + description: "The key ID (kid)." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + private_key: + type: "string" + title: "Private key" + description: "The private key in PEM format" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + scope: + type: "string" + title: "Scope" + description: "The OAuth scope." + order: 4 + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: + "An Okta token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + domain: + type: "string" + title: "Okta domain" + description: + "The Okta domain. See the docs for instructions on how to find it." 
+ airbyte_secret: false + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any\ + \ data before this date will not be replicated." + examples: + - "2022-07-22T00:00:00Z" + order: 2 + sourceType: + title: "okta" + const: "okta" + enum: + - "okta" + order: 0 + type: "string" + source-okta-update: + type: "object" + required: [] + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." + airbyte_secret: true + - type: "object" + title: "OAuth 2.0 with private key" + required: + - "auth_type" + - "client_id" + - "key_id" + - "private_key" + - "scope" + properties: + auth_type: + type: "string" + const: "oauth2.0_private_key" + order: 0 + enum: + - "oauth2.0_private_key" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + order: 1 + key_id: + type: "string" + title: "Key ID" + description: "The key ID (kid)." 
+ airbyte_secret: true + order: 2 + private_key: + type: "string" + title: "Private key" + description: "The private key in PEM format" + airbyte_secret: true + order: 3 + scope: + type: "string" + title: "Scope" + description: "The OAuth scope." + order: 4 + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: + "An Okta token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Okta domain" + description: + "The Okta domain. See the docs for instructions on how to find it." + airbyte_secret: false + order: 1 + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2022-07-22T00:00:00Z" + order: 2 + source-hibob: + type: "object" + required: + - "username" + - "is_sandbox" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + is_sandbox: + type: "boolean" + description: "Toggle true if this instance is a HiBob sandbox " + order: 2 + title: "Is Sandbox" + sourceType: + title: "hibob" + const: "hibob" + enum: + - "hibob" + order: 0 + type: "string" + source-hibob-update: + type: "object" + required: + - "username" + - "is_sandbox" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + is_sandbox: + type: "boolean" + description: "Toggle true if this instance is a HiBob sandbox " + order: 2 + title: "Is Sandbox" + source-mixpanel: + title: "Source Mixpanel Spec" + required: + - "credentials" + - "sourceType" + type: "object" + properties: + credentials: + title: "Authentication *" + description: "Choose how to authenticate to Mixpanel" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "Service Account" + required: + - "username" + - "secret" + - "project_id" + properties: + option_title: + type: "string" + const: "Service Account" + order: 0 + enum: + - "Service Account" + username: + order: 1 + title: "Username" + type: "string" + description: + "Mixpanel Service Account Username. See the docs\ + \ for more information on how to obtain this." + secret: + order: 2 + title: "Secret" + type: "string" + description: + "Mixpanel Service Account Secret. See the docs\ + \ for more information on how to obtain this." + airbyte_secret: true + x-speakeasy-param-sensitive: true + project_id: + order: 3 + title: "Project ID" + description: + "Your project ID number. 
See the docs for more information on how to obtain this." + type: "integer" + - type: "object" + title: "Project Secret" + required: + - "api_secret" + properties: + option_title: + type: "string" + const: "Project Secret" + order: 0 + enum: + - "Project Secret" + api_secret: + order: 1 + title: "Project Secret" + type: "string" + description: + "Mixpanel project secret. See the docs for more information on how to obtain this." + airbyte_secret: true + x-speakeasy-param-sensitive: true + attribution_window: + order: 2 + title: "Attribution Window" + type: "integer" + description: + "A period of time for attributing results to ads and the lookback\ + \ period after those actions occur during which ad results are counted.\ + \ Default attribution window is 5 days. (This value should be non-negative\ + \ integer)" + default: 5 + project_timezone: + order: 3 + title: "Project Timezone" + type: "string" + description: + "Time zone in which integer date times are stored. The project\ + \ timezone may be found in the project settings in the Mixpanel console." + default: "US/Pacific" + examples: + - "US/Pacific" + - "UTC" + select_properties_by_default: + order: 4 + title: "Select Properties By Default" + type: "boolean" + description: + "Setting this config parameter to TRUE ensures that new properties\ + \ on events and engage records are captured. Otherwise new properties\ + \ will be ignored." + default: true + start_date: + order: 5 + title: "Start Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If this option is not set, the connector will\ + \ replicate data from up to one year ago by default." + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + end_date: + order: 6 + title: "End Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
Left empty to always sync to most recent date" + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + region: + order: 7 + title: "Region" + description: "The region of mixpanel domain instance either US or EU." + type: "string" + enum: + - "US" + - "EU" + default: "US" + date_window_size: + order: 8 + title: "Date slicing window" + description: + "Defines window size in days, that used to slice through data.\ + \ You can reduce it, if amount of data in each window is too big for your\ + \ environment. (This value should be positive integer)" + type: "integer" + minimum: 1 + default: 30 + page_size: + order: 9 + title: "Page Size" + description: + "The number of records to fetch per request for the engage\ + \ stream. Default is 1000. If you are experiencing long sync times with\ + \ this stream, try increasing this value." + type: "integer" + minimum: 1 + default: 1000 + sourceType: + title: "mixpanel" + const: "mixpanel" + enum: + - "mixpanel" + order: 0 + type: "string" + source-mixpanel-update: + title: "Source Mixpanel Spec" + required: + - "credentials" + type: "object" + properties: + credentials: + title: "Authentication *" + description: "Choose how to authenticate to Mixpanel" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "Service Account" + required: + - "username" + - "secret" + - "project_id" + properties: + option_title: + type: "string" + const: "Service Account" + order: 0 + enum: + - "Service Account" + username: + order: 1 + title: "Username" + type: "string" + description: + "Mixpanel Service Account Username. See the docs\ + \ for more information on how to obtain this." + secret: + order: 2 + title: "Secret" + type: "string" + description: + "Mixpanel Service Account Secret. See the docs\ + \ for more information on how to obtain this." + airbyte_secret: true + project_id: + order: 3 + title: "Project ID" + description: + "Your project ID number. 
See the docs for more information on how to obtain this." + type: "integer" + - type: "object" + title: "Project Secret" + required: + - "api_secret" + properties: + option_title: + type: "string" + const: "Project Secret" + order: 0 + enum: + - "Project Secret" + api_secret: + order: 1 + title: "Project Secret" + type: "string" + description: + "Mixpanel project secret. See the docs for more information on how to obtain this." + airbyte_secret: true + attribution_window: + order: 2 + title: "Attribution Window" + type: "integer" + description: + "A period of time for attributing results to ads and the lookback\ + \ period after those actions occur during which ad results are counted.\ + \ Default attribution window is 5 days. (This value should be non-negative\ + \ integer)" + default: 5 + project_timezone: + order: 3 + title: "Project Timezone" + type: "string" + description: + "Time zone in which integer date times are stored. The project\ + \ timezone may be found in the project settings in the Mixpanel console." + default: "US/Pacific" + examples: + - "US/Pacific" + - "UTC" + select_properties_by_default: + order: 4 + title: "Select Properties By Default" + type: "boolean" + description: + "Setting this config parameter to TRUE ensures that new properties\ + \ on events and engage records are captured. Otherwise new properties\ + \ will be ignored." + default: true + start_date: + order: 5 + title: "Start Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If this option is not set, the connector will\ + \ replicate data from up to one year ago by default." + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + end_date: + order: 6 + title: "End Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
Left empty to always sync to most recent date" + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + region: + order: 7 + title: "Region" + description: "The region of mixpanel domain instance either US or EU." + type: "string" + enum: + - "US" + - "EU" + default: "US" + date_window_size: + order: 8 + title: "Date slicing window" + description: + "Defines window size in days, that used to slice through data.\ + \ You can reduce it, if amount of data in each window is too big for your\ + \ environment. (This value should be positive integer)" + type: "integer" + minimum: 1 + default: 30 + page_size: + order: 9 + title: "Page Size" + description: + "The number of records to fetch per request for the engage\ + \ stream. Default is 1000. If you are experiencing long sync times with\ + \ this stream, try increasing this value." + type: "integer" + minimum: 1 + default: 1000 + source-ip2whois: + type: "object" + required: + - "sourceType" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your API Key. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + domain: + type: "string" + title: "Domain" + description: + "Domain name. See here." + examples: + - "www.google.com" + - "www.facebook.com" + order: 1 + sourceType: + title: "ip2whois" + const: "ip2whois" + enum: + - "ip2whois" + order: 0 + type: "string" + source-ip2whois-update: + type: "object" + required: [] + properties: + api_key: + type: "string" + title: "API key" + description: + "Your API Key. See here." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Domain" + description: + "Domain name. See here." + examples: + - "www.google.com" + - "www.facebook.com" + order: 1 + source-twitter: + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + description: + "App only Bearer Token. 
See the docs for more information on how to obtain this token." + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + query: + type: "string" + description: + "Query for matching Tweets. You can learn how to build this\ + \ query by reading build a query guide ." + title: "Search Query" + order: 1 + start_date: + type: "string" + description: + "The start date for retrieving tweets cannot be more than 7\ + \ days in the past." + title: "Start Date" + format: "date-time" + order: 2 + end_date: + type: "string" + description: + "The end date for retrieving tweets must be a minimum of 10\ + \ seconds prior to the request time." + title: "End Date" + format: "date-time" + order: 3 + sourceType: + title: "twitter" + const: "twitter" + enum: + - "twitter" + order: 0 + type: "string" + source-twitter-update: + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + description: + "App only Bearer Token. See the docs for more information on how to obtain this token." + title: "Access Token" + airbyte_secret: true + order: 0 + query: + type: "string" + description: + "Query for matching Tweets. You can learn how to build this\ + \ query by reading build a query guide ." + title: "Search Query" + order: 1 + start_date: + type: "string" + description: + "The start date for retrieving tweets cannot be more than 7\ + \ days in the past." + title: "Start Date" + format: "date-time" + order: 2 + end_date: + type: "string" + description: + "The end date for retrieving tweets must be a minimum of 10\ + \ seconds prior to the request time." + title: "End Date" + format: "date-time" + order: 3 + source-sftp-bulk: + title: "SFTP Bulk Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the 
`hi_res`\ + \ mode. This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discovery" + description: + "The number of recent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + host: + title: "Host Address" + description: "The server host address" + examples: + - "www.host.com" + - "192.0.2.1" + order: 2 + type: "string" + username: + title: "User Name" + description: "The server user" + order: 3 + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the SFTP Server" + type: "object" + order: 4 + oneOf: + - title: "Authenticate via Password" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "password" + const: "password" + enum: + - "password" + type: "string" + password: + title: "Password" + description: "Password" + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + - "auth_type" + - title: "Authenticate via Private Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "private_key" + const: "private_key" + enum: + - "private_key" + type: "string" + private_key: + title: "Private key" + description: "The Private key"
+ multiline: true + order: 4 + type: "string" + required: + - "private_key" + - "auth_type" + port: + title: "Host Address" + description: "The server port" + default: 22 + examples: + - "22" + order: 5 + type: "integer" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + default: "/" + examples: + - "/logs/2022" + order: 6 + pattern_descriptor: "/folder_to_sync" + type: "string" + sourceType: + title: "sftp-bulk" + const: "sftp-bulk" + enum: + - "sftp-bulk" + order: 0 + type: "string" + required: + - "streams" + - "host" + - "username" + - "credentials" + - "sourceType" + source-sftp-bulk-update: + title: "SFTP Bulk Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. 
For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." 
+ default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the `hi_res`\ + \ mode. 
This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discovery" + description: + "The number of recent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + host: + title: "Host Address" + description: "The server host address" + examples: + - "www.host.com" + - "192.0.2.1" + order: 2 + type: "string" + username: + title: "User Name" + description: "The server user" + order: 3 + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the SFTP Server" + type: "object" + order: 4 + oneOf: + - title: "Authenticate via Password" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "password" + const: "password" + enum: + - "password" + type: "string" + password: + title: "Password" + description: "Password" + airbyte_secret: true + order: 3 + type: "string" + required: + - "password" + - "auth_type" + - title: "Authenticate via Private Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "private_key" + const: "private_key" + enum: + - "private_key" + type: "string" + private_key: + title: "Private key" + description: "The Private key" + multiline: true + order: 4 + type: "string" +
required: + - "private_key" + - "auth_type" + port: + title: "Host Address" + description: "The server port" + default: 22 + examples: + - "22" + order: 5 + type: "integer" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + default: "/" + examples: + - "/logs/2022" + order: 6 + pattern_descriptor: "/folder_to_sync" + type: "string" + required: + - "streams" + - "host" + - "username" + - "credentials" + source-zendesk-support: + title: "Source Zendesk Support Spec" + type: "object" + required: + - "subdomain" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The UTC date and time from which you'd like to replicate data,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2020-10-15T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + format: "date-time" + order: 2 + subdomain: + type: "string" + title: "Subdomain" + description: + "This is your unique Zendesk subdomain that can be found in\ + \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ + \ MY_SUBDOMAIN is the value of your subdomain." + order: 0 + credentials: + title: "Authentication" + type: "object" + description: + "Zendesk allows two authentication methods. We recommend using\ + \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ + \ Source users." + order: 1 + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The OAuth access token. See the Zendesk docs for more information on generating this token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See our full documentation for more information on generating this\ + \ token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zendesk-support" + const: "zendesk-support" + enum: + - "zendesk-support" + order: 0 + type: "string" + source-zendesk-support-update: + title: "Source Zendesk Support Spec" + type: "object" + required: + - "subdomain" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The UTC date and time from which you'd like to replicate data,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2020-10-15T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + format: "date-time" + order: 2 + subdomain: + type: "string" + title: "Subdomain" + description: + "This is your unique Zendesk subdomain that can be found in\ + \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ + \ MY_SUBDOMAIN is the value of your subdomain." 
+ order: 0 + credentials: + title: "Authentication" + type: "object" + description: + "Zendesk allows two authentication methods. We recommend using\ + \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ + \ Source users." + order: 1 + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The OAuth access token. See the Zendesk docs for more information on generating this token." + airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See our full documentation for more information on generating this\ + \ token." + airbyte_secret: true + source-microsoft-onedrive: + title: "Microsoft OneDrive Source Spec" + description: + "SourceMicrosoftOneDriveSpec class for Microsoft OneDrive Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the OneDrive API." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + drive_name: + title: "Drive Name" + description: "Name of the Microsoft OneDrive drive where the file(s) exist." + default: "OneDrive" + order: 2 + type: "string" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS'\ + \ for shared items the user has access to, and 'ALL' to search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." + order: 4 + type: "string" + sourceType: + title: "microsoft-onedrive" + const: "microsoft-onedrive" + enum: + - "microsoft-onedrive" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "sourceType" + source-microsoft-onedrive-update: + title: "Microsoft OneDrive Source Spec" + description: + "SourceMicrosoftOneDriveSpec class for Microsoft OneDrive Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the OneDrive API." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + drive_name: + title: "Drive Name" + description: "Name of the Microsoft OneDrive drive where the file(s) exist." + default: "OneDrive" + order: 2 + type: "string" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS'\ + \ for shared items the user has access to, and 'ALL' to search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." 
+ order: 4 + type: "string" + required: + - "streams" + - "credentials" + source-appfigures: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + search_store: + type: "string" + description: "The store which needs to be searched in streams" + title: "Search Store" + default: "apple" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + group_by: + type: "string" + description: "Category term for grouping the search results" + title: "Group by" + default: "product" + enum: + - "network" + - "product" + - "country" + - "date" + order: 3 + sourceType: + title: "appfigures" + const: "appfigures" + enum: + - "appfigures" + order: 0 + type: "string" + source-appfigures-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + search_store: + type: "string" + description: "The store which needs to be searched in streams" + title: "Search Store" + default: "apple" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + group_by: + type: "string" + description: "Category term for grouping the search results" + title: "Group by" + default: "product" + enum: + - "network" + - "product" + - "country" + - "date" + order: 3 + source-tiktok-marketing: + title: "TikTok Marketing Source Spec" + type: "object" + properties: + credentials: + title: "Authentication Method" + description: "Authentication method" + default: {} + order: 0 + type: "object" + oneOf: + - title: "OAuth2.0" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: 
+ - "oauth2.0" + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + description: "Long-term Authorized Access Token." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID to filter reports and streams. Let\ + \ this empty to retrieve all." + type: "string" + required: + - "app_id" + - "secret" + - "access_token" + - title: "Sandbox Access Token" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "sandbox_access_token" + order: 0 + type: "string" + enum: + - "sandbox_access_token" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID which generated for the developer's\ + \ Sandbox application." + type: "string" + access_token: + title: "Access Token" + description: "The long-term authorized access token." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "advertiser_id" + - "access_token" + start_date: + title: "Replication Start Date" + description: + "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + format: "date" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DD. All data generated between\ + \ start_date and this date will be replicated. Not setting this option\ + \ will result in always syncing the data till the current date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + type: "string" + format: "date" + attribution_window: + title: "Attribution Window" + description: "The attribution window in days." + minimum: 0 + maximum: 364 + default: 3 + order: 3 + type: "integer" + include_deleted: + title: + "Include Deleted Data in Reports and Ads, Ad Groups and Campaign\ + \ streams." + description: + "Set to active if you want to include deleted data in report\ + \ based streams and Ads, Ad Groups and Campaign streams." + default: false + order: 4 + type: "boolean" + sourceType: + title: "tiktok-marketing" + const: "tiktok-marketing" + enum: + - "tiktok-marketing" + order: 0 + type: "string" + source-tiktok-marketing-update: + title: "TikTok Marketing Source Spec" + type: "object" + properties: + credentials: + title: "Authentication Method" + description: "Authentication method" + default: {} + order: 0 + type: "object" + oneOf: + - title: "OAuth2.0" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + access_token: + title: "Access Token" + description: "Long-term Authorized Access Token." + airbyte_secret: true + type: "string" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID to filter reports and streams. Let\ + \ this empty to retrieve all." 
+ type: "string" + required: + - "app_id" + - "secret" + - "access_token" + - title: "Sandbox Access Token" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "sandbox_access_token" + order: 0 + type: "string" + enum: + - "sandbox_access_token" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID which generated for the developer's\ + \ Sandbox application." + type: "string" + access_token: + title: "Access Token" + description: "The long-term authorized access token." + airbyte_secret: true + type: "string" + required: + - "advertiser_id" + - "access_token" + start_date: + title: "Replication Start Date" + description: + "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + format: "date" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DD. All data generated between\ + \ start_date and this date will be replicated. Not setting this option\ + \ will result in always syncing the data till the current date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + type: "string" + format: "date" + attribution_window: + title: "Attribution Window" + description: "The attribution window in days." + minimum: 0 + maximum: 364 + default: 3 + order: 3 + type: "integer" + include_deleted: + title: + "Include Deleted Data in Reports and Ads, Ad Groups and Campaign\ + \ streams." + description: + "Set to active if you want to include deleted data in report\ + \ based streams and Ads, Ad Groups and Campaign streams." 
+ default: false + order: 4 + type: "boolean" + source-aws-cloudtrail: + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + - "sourceType" + properties: + aws_key_id: + type: "string" + title: "Key ID" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_secret_key: + type: "string" + title: "Secret Key" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_region_name: + type: "string" + title: "Region Name" + description: + "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + default: "us-east-1" + start_date: + type: "string" + title: "Start Date" + description: + "The date you would like to replicate data. Data in AWS CloudTrail\ + \ is available for last 90 days only. Format: YYYY-MM-DD." 
+ examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + lookup_attributes_filter: + title: + "Filter applied while fetching records based on AttributeKey and\ + \ AttributeValue which will be appended on the request body" + type: "object" + required: + - "attribute_key" + - "attribute_value" + properties: + attribute_key: + type: "string" + title: "Attribute Key from the response to filter" + examples: + - "EventName" + default: "EventName" + attribute_value: + type: "string" + title: "Corresponding value to the given attribute key" + examples: + - "ListInstanceAssociations" + - "ConsoleLogin" + default: "ListInstanceAssociations" + sourceType: + title: "aws-cloudtrail" + const: "aws-cloudtrail" + enum: + - "aws-cloudtrail" + order: 0 + type: "string" + source-aws-cloudtrail-update: + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + properties: + aws_key_id: + type: "string" + title: "Key ID" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + aws_secret_key: + type: "string" + title: "Secret Key" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + aws_region_name: + type: "string" + title: "Region Name" + description: + "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + default: "us-east-1" + start_date: + type: "string" + title: "Start Date" + description: + "The date you would like to replicate data. Data in AWS CloudTrail\ + \ is available for last 90 days only. Format: YYYY-MM-DD." 
+ examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + lookup_attributes_filter: + title: + "Filter applied while fetching records based on AttributeKey and\ + \ AttributeValue which will be appended on the request body" + type: "object" + required: + - "attribute_key" + - "attribute_value" + properties: + attribute_key: + type: "string" + title: "Attribute Key from the response to filter" + examples: + - "EventName" + default: "EventName" + attribute_value: + type: "string" + title: "Corresponding value to the given attribute key" + examples: + - "ListInstanceAssociations" + - "ConsoleLogin" + default: "ListInstanceAssociations" + source-jira: + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + - "sourceType" + properties: + api_token: + type: "string" + title: "API Token" + description: + "Jira API Token. See the docs for more information on how to generate this key. API Token\ + \ is used for Authorization to your account by BasicAuth." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + domain: + type: "string" + title: "Domain" + examples: + - ".atlassian.net" + - ".jira.com" + - "jira..com" + description: + "The Domain for your Jira account, e.g. airbyteio.atlassian.net,\ + \ airbyteio.jira.com, jira.your-domain.com" + order: 1 + email: + type: "string" + title: "Email" + description: + "The user email for your Jira account which you used to generate\ + \ the API token. This field is used for Authorization to your account\ + \ by BasicAuth." + order: 2 + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: + "List of Jira project keys to replicate data for, or leave\ + \ it empty if you want to replicate data for all projects." 
+ order: 3 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you want to replicate data from Jira,\ + \ use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies\ + \ to certain streams, and only data generated on or after the start date\ + \ will be replicated. Or leave it empty if you want to replicate all data.\ + \ For more information, refer to the documentation." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 4 + lookback_window_minutes: + title: "Lookback window" + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N minutes. By default, updated objects that\ + \ are not newly created are not incrementally synced." + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 5 + enable_experimental_streams: + type: "boolean" + title: "Enable Experimental Streams" + description: + "Allow the use of experimental streams which rely on undocumented\ + \ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\ + \ for more info." + default: false + order: 6 + sourceType: + title: "jira" + const: "jira" + enum: + - "jira" + order: 0 + type: "string" + source-jira-update: + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + properties: + api_token: + type: "string" + title: "API Token" + description: + "Jira API Token. See the docs for more information on how to generate this key. API Token\ + \ is used for Authorization to your account by BasicAuth." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Domain" + examples: + - ".atlassian.net" + - ".jira.com" + - "jira..com" + description: + "The Domain for your Jira account, e.g. 
airbyteio.atlassian.net,\ + \ airbyteio.jira.com, jira.your-domain.com" + order: 1 + email: + type: "string" + title: "Email" + description: + "The user email for your Jira account which you used to generate\ + \ the API token. This field is used for Authorization to your account\ + \ by BasicAuth." + order: 2 + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: + "List of Jira project keys to replicate data for, or leave\ + \ it empty if you want to replicate data for all projects." + order: 3 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you want to replicate data from Jira,\ + \ use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies\ + \ to certain streams, and only data generated on or after the start date\ + \ will be replicated. Or leave it empty if you want to replicate all data.\ + \ For more information, refer to the documentation." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 4 + lookback_window_minutes: + title: "Lookback window" + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N minutes. By default, updated objects that\ + \ are not newly created are not incrementally synced." + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 5 + enable_experimental_streams: + type: "boolean" + title: "Enable Experimental Streams" + description: + "Allow the use of experimental streams which rely on undocumented\ + \ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\ + \ for more info." 
+ default: false + order: 6 + source-hubspot: + title: "HubSpot Source Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If not set, \"2006-06-01T00:00:00Z\"\ + \ (Hubspot creation date) will be used as start date. It's recommended\ + \ to provide relevant to your data start date value to optimize synchronization." + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + credentials: + title: "Authentication" + description: "Choose how to authenticate to HubSpot." + type: "object" + oneOf: + - type: "object" + title: "OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Refresh token to renew an expired access token. See\ + \ the Hubspot docs if you need help finding this token." 
+ type: "string" + examples: + - "refresh_token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Private App" + required: + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials set" + const: "Private App Credentials" + order: 0 + enum: + - "Private App Credentials" + access_token: + title: "Access token" + description: + "HubSpot Access token. See the Hubspot docs if you need help finding this token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + enable_experimental_streams: + title: "Enable experimental streams" + description: + "If enabled then experimental streams become available for\ + \ sync." + type: "boolean" + default: false + sourceType: + title: "hubspot" + const: "hubspot" + enum: + - "hubspot" + order: 0 + type: "string" + source-hubspot-update: + title: "HubSpot Source Spec" + type: "object" + required: + - "credentials" + properties: + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If not set, \"2006-06-01T00:00:00Z\"\ + \ (Hubspot creation date) will be used as start date. It's recommended\ + \ to provide relevant to your data start date value to optimize synchronization." + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + credentials: + title: "Authentication" + description: "Choose how to authenticate to HubSpot." 
+ type: "object" + oneOf: + - type: "object" + title: "OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + description: + "Refresh token to renew an expired access token. See\ + \ the Hubspot docs if you need help finding this token." + type: "string" + examples: + - "refresh_token" + airbyte_secret: true + - type: "object" + title: "Private App" + required: + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials set" + const: "Private App Credentials" + order: 0 + enum: + - "Private App Credentials" + access_token: + title: "Access token" + description: + "HubSpot Access token. See the Hubspot docs if you need help finding this token." + type: "string" + airbyte_secret: true + enable_experimental_streams: + title: "Enable experimental streams" + description: + "If enabled then experimental streams become available for\ + \ sync." 
+ type: "boolean" + default: false + source-rss: + title: "RSS Spec" + type: "object" + required: + - "url" + - "sourceType" + properties: + url: + type: "string" + description: "RSS Feed URL" + sourceType: + title: "rss" + const: "rss" + enum: + - "rss" + order: 0 + type: "string" + source-rss-update: + title: "RSS Spec" + type: "object" + required: + - "url" + properties: + url: + type: "string" + description: "RSS Feed URL" + source-sap-fieldglass: + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "sap-fieldglass" + const: "sap-fieldglass" + enum: + - "sap-fieldglass" + order: 0 + type: "string" + source-sap-fieldglass-update: + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + source-twilio-taskrouter: + type: "object" + required: + - "account_sid" + - "auth_token" + - "sourceType" + properties: + account_sid: + type: "string" + description: "Twilio Account ID" + title: "Account SID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + auth_token: + type: "string" + description: "Twilio Auth Token" + airbyte_secret: true + title: "Auth Token" + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "twilio-taskrouter" + const: "twilio-taskrouter" + enum: + - "twilio-taskrouter" + order: 0 + type: "string" + source-twilio-taskrouter-update: + type: "object" + required: + - "account_sid" + - "auth_token" + properties: + account_sid: + type: "string" + description: "Twilio Account ID" + title: "Account SID" + airbyte_secret: true + order: 0 + auth_token: + type: "string" + description: "Twilio Auth Token" + airbyte_secret: true + title: "Auth Token" + order: 1 + source-xkcd: + type: "object" + properties: + comic_number: + type: 
"string" + title: "comic_number" + description: + "Specifies the comic number in which details are to be extracted,\ + \ pagination will begin with that number to end of available comics" + default: "2960" + order: 0 + sourceType: + title: "xkcd" + const: "xkcd" + enum: + - "xkcd" + order: 0 + type: "string" + source-xkcd-update: + type: "object" + properties: + comic_number: + type: "string" + title: "comic_number" + description: + "Specifies the comic number in which details are to be extracted,\ + \ pagination will begin with that number to end of available comics" + default: "2960" + order: 0 + source-zenloop: + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + x-speakeasy-param-sensitive: true + date_from: + type: "string" + description: + "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: + "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + x-speakeasy-param-sensitive: true + survey_group_id: + type: "string" + description: + "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zenloop" + const: "zenloop" + enum: + - "zenloop" + order: 0 + type: "string" + source-zenloop-update: + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + date_from: + type: "string" + description: + "Zenloop date_from. 
Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: + "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + survey_group_id: + type: "string" + description: + "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + source-tempo: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + title: "API token" + description: + "Tempo API Token. Go to Tempo>Settings, scroll down to Data\ + \ Access and select API integration." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "tempo" + const: "tempo" + enum: + - "tempo" + order: 0 + type: "string" + source-tempo-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + title: "API token" + description: + "Tempo API Token. Go to Tempo>Settings, scroll down to Data\ + \ Access and select API integration." + airbyte_secret: true + order: 0 + source-chargebee: + title: "Chargebee Spec" + type: "object" + required: + - "site" + - "site_api_key" + - "start_date" + - "sourceType" + properties: + site_api_key: + type: "string" + title: "API Key" + description: + "Chargebee API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + site: + type: "string" + title: "Site" + description: "The site prefix for your Chargebee instance." + examples: + - "airbyte-test" + order: 1 + start_date: + type: "string" + format: "date-time" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000Z.\ + \ Any data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-25T00:00:00Z" + order: 2 + product_catalog: + type: "string" + title: "Product Catalog" + description: + "Product Catalog version of your Chargebee site. Instructions\ + \ on how to find your version you may find here under `API Version` section. If left blank, the product catalog\ + \ version will be set to 2.0." + enum: + - "1.0" + - "2.0" + default: "2.0" + order: 3 + sourceType: + title: "chargebee" + const: "chargebee" + enum: + - "chargebee" + order: 0 + type: "string" + source-chargebee-update: + title: "Chargebee Spec" + type: "object" + required: + - "site" + - "site_api_key" + - "start_date" + properties: + site_api_key: + type: "string" + title: "API Key" + description: + "Chargebee API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + site: + type: "string" + title: "Site" + description: "The site prefix for your Chargebee instance." + examples: + - "airbyte-test" + order: 1 + start_date: + type: "string" + format: "date-time" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000Z.\ + \ Any data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-25T00:00:00Z" + order: 2 + product_catalog: + type: "string" + title: "Product Catalog" + description: + "Product Catalog version of your Chargebee site. Instructions\ + \ on how to find your version you may find here under `API Version` section. If left blank, the product catalog\ + \ version will be set to 2.0." 
+ enum: + - "1.0" + - "2.0" + default: "2.0" + order: 3 + source-onesignal: + title: "OneSignal Source Spec" + type: "object" + required: + - "user_auth_key" + - "start_date" + - "outcome_names" + - "applications" + - "sourceType" + properties: + user_auth_key: + type: "string" + title: "User Auth Key" + description: + "OneSignal User Auth Key, see the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + applications: + type: "array" + title: "Applications" + description: + "Applications keys, see the docs for more information on how to obtain this data" + items: + type: "object" + properties: + app_name: + type: "string" + title: "OneSignal App Name" + order: 0 + app_id: + type: "string" + title: "OneSignal App ID" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + app_api_key: + type: "string" + title: "REST API Key" + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "app_id" + - "app_api_key" + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for OneSignal\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 2 + outcome_names: + type: "string" + title: "Outcome Names" + description: + "Comma-separated list of names and the value (sum/count) for\ + \ the returned outcome data. 
See the docs for more details" + examples: + - "os__session_duration.count,os__click.count,CustomOutcomeName.sum" + order: 3 + sourceType: + title: "onesignal" + const: "onesignal" + enum: + - "onesignal" + order: 0 + type: "string" + source-onesignal-update: + title: "OneSignal Source Spec" + type: "object" + required: + - "user_auth_key" + - "start_date" + - "outcome_names" + - "applications" + properties: + user_auth_key: + type: "string" + title: "User Auth Key" + description: + "OneSignal User Auth Key, see the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + applications: + type: "array" + title: "Applications" + description: + "Applications keys, see the docs for more information on how to obtain this data" + items: + type: "object" + properties: + app_name: + type: "string" + title: "OneSignal App Name" + order: 0 + app_id: + type: "string" + title: "OneSignal App ID" + order: 1 + airbyte_secret: true + app_api_key: + type: "string" + title: "REST API Key" + order: 2 + airbyte_secret: true + required: + - "app_id" + - "app_api_key" + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for OneSignal\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 2 + outcome_names: + type: "string" + title: "Outcome Names" + description: + "Comma-separated list of names and the value (sum/count) for\ + \ the returned outcome data. 
See the docs for more details" + examples: + - "os__session_duration.count,os__click.count,CustomOutcomeName.sum" + order: 3 + source-google-analytics-data-api: + title: "Google Analytics (Data API) Spec" + type: "object" + required: + - "property_ids" + - "sourceType" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Google Analytics developer\ + \ application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining a new access token." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + credentials_json: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key linked to the service account used for\ + \ authorization. For steps on obtaining this key, refer to the setup guide." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + property_ids: + title: "Property IDs" + description: + "A list of your Property IDs. The Property ID is a unique number\ + \ assigned to each property in Google Analytics, found in your GA4 property\ + \ URL. This ID allows the connector to track the specific events associated\ + \ with your property. Refer to the Google\ + \ Analytics documentation to locate your property ID." + order: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + examples: + - - "1738294" + - "5729978930" + uniqueItems: true + date_ranges_start_date: + type: "string" + title: "Start Date" + description: + "The start date from which to replicate report data in the\ + \ format YYYY-MM-DD. Data generated before this date will not be included\ + \ in the report. Not applied to custom Cohort reports." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 4 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name." + type: "string" + order: 0 + dimensions: + title: "Dimensions" + description: "A list of dimensions." + type: "array" + items: + type: "string" + minItems: 1 + order: 1 + metrics: + title: "Metrics" + description: "A list of metrics." + type: "array" + items: + type: "string" + minItems: 1 + order: 2 + dimensionFilter: + title: "Dimensions filter" + description: "Dimensions filter" + type: "object" + order: 3 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all dimensions." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + metricFilter: + title: "Metrics filter" + description: "Metrics filter" + type: "object" + order: 4 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all metrics." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + cohortSpec: + title: "Cohort Reports" + description: + "Cohort reports creates a time series of user retention\ + \ for the cohort." + type: "object" + order: 5 + oneOf: + - title: "Disabled" + type: "object" + properties: + enabled: + type: "string" + const: "false" + enum: + - "false" + - title: "Enabled" + type: "object" + properties: + enabled: + type: "string" + const: "true" + enum: + - "true" + cohorts: + name: "Cohorts" + order: 0 + type: "array" + always_show: true + items: + title: "Cohorts" + type: "object" + required: + - "dimension" + - "dateRange" + properties: + name: + title: "Name" + type: "string" + always_show: true + pattern: "^(?!(cohort_|RESERVED_)).*$" + description: + "Assigns a name to this cohort. 
If not set,\ + \ cohorts are named by their zero based index cohort_0,\ + \ cohort_1, etc." + order: 0 + dimension: + title: "Dimension" + description: + "Dimension used by the cohort. Required and\ + \ only supports `firstSessionDate`" + type: "string" + enum: + - "firstSessionDate" + order: 1 + dateRange: + type: "object" + required: + - "startDate" + - "endDate" + properties: + startDate: + title: "Start Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + endDate: + title: "End Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 3 + cohortsRange: + type: "object" + order: 1 + required: + - "granularity" + - "endOffset" + properties: + granularity: + title: "Granularity" + description: + "The granularity used to interpret the startOffset\ + \ and endOffset for the extended reporting date range\ + \ for a cohort report." + type: "string" + enum: + - "GRANULARITY_UNSPECIFIED" + - "DAILY" + - "WEEKLY" + - "MONTHLY" + order: 0 + startOffset: + title: "Start Offset" + description: + "Specifies the start date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 1 + endOffset: + title: "End Offset" + description: + "Specifies the end date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 2 + cohortReportSettings: + type: "object" + title: "Cohort Report Settings" + description: "Optional settings for a cohort report." 
+ properties: + accumulate: + always_show: true + title: "Accumulate" + description: + "If true, accumulates the result from first\ + \ touch day to the end day" + type: "boolean" + required: + - "name" + - "dimensions" + - "metrics" + window_in_days: + type: "integer" + title: "Data Request Interval (Days)" + description: + "The interval in days for each data request made to the Google\ + \ Analytics API. A larger value speeds up data sync, but increases the\ + \ chance of data sampling, which may result in inaccuracies. We recommend\ + \ a value of 1 to minimize sampling, unless speed is an absolute priority\ + \ over accuracy. Acceptable values range from 1 to 364. Does not apply\ + \ to custom Cohort reports. More information is available in the documentation." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + minimum: 1 + maximum: 364 + default: 1 + order: 5 + lookback_window: + type: "integer" + title: "Lookback window (Days)" + description: + "Since attribution changes after the event date, and Google\ + \ Analytics has a data processing latency, we should specify how many\ + \ days in the past we should refresh the data in every run. So if you\ + \ set it at 5 days, in every sync it will fetch the last bookmark date\ + \ minus 5 days." + examples: + - 2 + - 3 + - 4 + - 7 + - 14 + - 28 + minimum: 2 + maximum: 60 + default: 2 + order: 6 + keep_empty_rows: + type: "boolean" + title: "Keep Empty Rows" + description: + "If false, each row with all metrics equal to 0 will not be\ + \ returned. If true, these rows will be returned if they are not separately\ + \ removed by a filter. More information is available in the documentation." + default: false + order: 7 + convert_conversions_event: + type: "boolean" + title: "Convert `conversions:*` Metrics to Float" + description: + "Enables conversion of `conversions:*` event metrics from integers\ + \ to floats. 
This is beneficial for preventing data rounding when the\ + \ API returns float values for any `conversions:*` fields." + default: false + order: 8 + sourceType: + title: "google-analytics-data-api" + const: "google-analytics-data-api" + enum: + - "google-analytics-data-api" + order: 0 + type: "string" + source-google-analytics-data-api-update: + title: "Google Analytics (Data API) Spec" + type: "object" + required: + - "property_ids" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Google Analytics developer\ + \ application." + airbyte_secret: true + order: 2 + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining a new access token." + airbyte_secret: true + order: 3 + access_token: + title: "Access Token" + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 4 + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + credentials_json: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key linked to the service account used for\ + \ authorization. For steps on obtaining this key, refer to the setup guide." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + order: 1 + property_ids: + title: "Property IDs" + description: + "A list of your Property IDs. The Property ID is a unique number\ + \ assigned to each property in Google Analytics, found in your GA4 property\ + \ URL. This ID allows the connector to track the specific events associated\ + \ with your property. Refer to the Google\ + \ Analytics documentation to locate your property ID." + order: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + examples: + - - "1738294" + - "5729978930" + uniqueItems: true + date_ranges_start_date: + type: "string" + title: "Start Date" + description: + "The start date from which to replicate report data in the\ + \ format YYYY-MM-DD. Data generated before this date will not be included\ + \ in the report. Not applied to custom Cohort reports." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 4 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name." + type: "string" + order: 0 + dimensions: + title: "Dimensions" + description: "A list of dimensions." + type: "array" + items: + type: "string" + minItems: 1 + order: 1 + metrics: + title: "Metrics" + description: "A list of metrics." + type: "array" + items: + type: "string" + minItems: 1 + order: 2 + dimensionFilter: + title: "Dimensions filter" + description: "Dimensions filter" + type: "object" + order: 3 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all dimensions." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + metricFilter: + title: "Metrics filter" + description: "Metrics filter" + type: "object" + order: 4 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all metrics." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + cohortSpec: + title: "Cohort Reports" + description: + "Cohort reports creates a time series of user retention\ + \ for the cohort." + type: "object" + order: 5 + oneOf: + - title: "Disabled" + type: "object" + properties: + enabled: + type: "string" + const: "false" + enum: + - "false" + - title: "Enabled" + type: "object" + properties: + enabled: + type: "string" + const: "true" + enum: + - "true" + cohorts: + name: "Cohorts" + order: 0 + type: "array" + always_show: true + items: + title: "Cohorts" + type: "object" + required: + - "dimension" + - "dateRange" + properties: + name: + title: "Name" + type: "string" + always_show: true + pattern: "^(?!(cohort_|RESERVED_)).*$" + description: + "Assigns a name to this cohort. 
If not set,\ + \ cohorts are named by their zero based index cohort_0,\ + \ cohort_1, etc." + order: 0 + dimension: + title: "Dimension" + description: + "Dimension used by the cohort. Required and\ + \ only supports `firstSessionDate`" + type: "string" + enum: + - "firstSessionDate" + order: 1 + dateRange: + type: "object" + required: + - "startDate" + - "endDate" + properties: + startDate: + title: "Start Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + endDate: + title: "End Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 3 + cohortsRange: + type: "object" + order: 1 + required: + - "granularity" + - "endOffset" + properties: + granularity: + title: "Granularity" + description: + "The granularity used to interpret the startOffset\ + \ and endOffset for the extended reporting date range\ + \ for a cohort report." + type: "string" + enum: + - "GRANULARITY_UNSPECIFIED" + - "DAILY" + - "WEEKLY" + - "MONTHLY" + order: 0 + startOffset: + title: "Start Offset" + description: + "Specifies the start date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 1 + endOffset: + title: "End Offset" + description: + "Specifies the end date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 2 + cohortReportSettings: + type: "object" + title: "Cohort Report Settings" + description: "Optional settings for a cohort report." 
+ properties: + accumulate: + always_show: true + title: "Accumulate" + description: + "If true, accumulates the result from first\ + \ touch day to the end day" + type: "boolean" + required: + - "name" + - "dimensions" + - "metrics" + window_in_days: + type: "integer" + title: "Data Request Interval (Days)" + description: + "The interval in days for each data request made to the Google\ + \ Analytics API. A larger value speeds up data sync, but increases the\ + \ chance of data sampling, which may result in inaccuracies. We recommend\ + \ a value of 1 to minimize sampling, unless speed is an absolute priority\ + \ over accuracy. Acceptable values range from 1 to 364. Does not apply\ + \ to custom Cohort reports. More information is available in the documentation." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + minimum: 1 + maximum: 364 + default: 1 + order: 5 + lookback_window: + type: "integer" + title: "Lookback window (Days)" + description: + "Since attribution changes after the event date, and Google\ + \ Analytics has a data processing latency, we should specify how many\ + \ days in the past we should refresh the data in every run. So if you\ + \ set it at 5 days, in every sync it will fetch the last bookmark date\ + \ minus 5 days." + examples: + - 2 + - 3 + - 4 + - 7 + - 14 + - 28 + minimum: 2 + maximum: 60 + default: 2 + order: 6 + keep_empty_rows: + type: "boolean" + title: "Keep Empty Rows" + description: + "If false, each row with all metrics equal to 0 will not be\ + \ returned. If true, these rows will be returned if they are not separately\ + \ removed by a filter. More information is available in the documentation." + default: false + order: 7 + convert_conversions_event: + type: "boolean" + title: "Convert `conversions:*` Metrics to Float" + description: + "Enables conversion of `conversions:*` event metrics from integers\ + \ to floats. 
This is beneficial for preventing data rounding when the\ + \ API returns float values for any `conversions:*` fields." + default: false + order: 8 + source-mailgun: + type: "object" + required: + - "private_key" + - "sourceType" + properties: + private_key: + type: "string" + order: 0 + title: "Private API Key" + description: "Primary account API key to access your Mailgun data." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain_region: + type: "string" + order: 1 + title: "Domain Region Code" + description: + "Domain region code. 'EU' or 'US' are possible values. The\ + \ default is 'US'." + default: "US" + enum: + - "US" + - "EU" + start_date: + type: "string" + order: 2 + title: "Replication Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2023-08-01T00:00:00Z" + description: + "UTC date and time in the format 2020-10-01 00:00:00. Any data\ + \ before this date will not be replicated. If omitted, defaults to 3 days\ + \ ago." + sourceType: + title: "mailgun" + const: "mailgun" + enum: + - "mailgun" + order: 0 + type: "string" + source-mailgun-update: + type: "object" + required: + - "private_key" + properties: + private_key: + type: "string" + order: 0 + title: "Private API Key" + description: "Primary account API key to access your Mailgun data." + airbyte_secret: true + domain_region: + type: "string" + order: 1 + title: "Domain Region Code" + description: + "Domain region code. 'EU' or 'US' are possible values. The\ + \ default is 'US'." + default: "US" + enum: + - "US" + - "EU" + start_date: + type: "string" + order: 2 + title: "Replication Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2023-08-01T00:00:00Z" + description: + "UTC date and time in the format 2020-10-01 00:00:00. Any data\ + \ before this date will not be replicated. If omitted, defaults to 3 days\ + \ ago." 
+ source-intercom: + title: "Source Intercom Spec" + type: "object" + required: + - "start_date" + - "access_token" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + title: "Access token" + type: "string" + description: + "Access token for making authenticated requests. See the Intercom docs for more information." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + activity_logs_time_step: + type: "integer" + default: 30 + minimum: 1 + maximum: 91 + title: "Activity logs stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of Activity\ + \ Logs stream." + examples: + - 30 + - 10 + - 5 + order: 3 + sourceType: + title: "intercom" + const: "intercom" + enum: + - "intercom" + order: 0 + type: "string" + source-intercom-update: + title: "Source Intercom Spec" + type: "object" + required: + - "start_date" + - "access_token" + properties: + start_date: + type: "string" + title: "Start date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + title: "Access token" + type: "string" + description: + "Access token for making authenticated requests. See the Intercom docs for more information." + airbyte_secret: true + order: 0 + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + activity_logs_time_step: + type: "integer" + default: 30 + minimum: 1 + maximum: 91 + title: "Activity logs stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of Activity\ + \ Logs stream." + examples: + - 30 + - 10 + - 5 + order: 3 + source-rki-covid: + title: "RKI Covid Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format 2017-01-25. Any data before this date\ + \ will not be replicated." + order: 1 + sourceType: + title: "rki-covid" + const: "rki-covid" + enum: + - "rki-covid" + order: 0 + type: "string" + source-rki-covid-update: + title: "RKI Covid Spec" + type: "object" + required: + - "start_date" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format 2017-01-25. Any data before this date\ + \ will not be replicated." + order: 1 + source-secoda: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Api Key" + airbyte_secret: true + description: + "Your API Access Key. See here. The key is case sensitive." 
+ order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "secoda" + const: "secoda" + enum: + - "secoda" + order: 0 + type: "string" + source-secoda-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "Api Key" + airbyte_secret: true + description: + "Your API Access Key. See here. The key is case sensitive." + order: 0 + source-zoom: + title: "Zoom Spec" + type: "object" + required: + - "account_id" + - "client_id" + - "client_secret" + - "authorization_endpoint" + - "sourceType" + properties: + account_id: + type: "string" + order: 0 + description: + "The account ID for your Zoom account. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + client_id: + type: "string" + order: 1 + description: + "The client ID for your Zoom app. You can find this in the\ + \ Zoom Marketplace under the \"Manage\" tab for your app." + client_secret: + type: "string" + order: 2 + description: + "The client secret for your Zoom app. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + airbyte_secret: true + x-speakeasy-param-sensitive: true + authorization_endpoint: + type: "string" + order: 3 + default: "https://zoom.us/oauth/token" + sourceType: + title: "zoom" + const: "zoom" + enum: + - "zoom" + order: 0 + type: "string" + source-zoom-update: + title: "Zoom Spec" + type: "object" + required: + - "account_id" + - "client_id" + - "client_secret" + - "authorization_endpoint" + properties: + account_id: + type: "string" + order: 0 + description: + "The account ID for your Zoom account. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + client_id: + type: "string" + order: 1 + description: + "The client ID for your Zoom app. You can find this in the\ + \ Zoom Marketplace under the \"Manage\" tab for your app." + client_secret: + type: "string" + order: 2 + description: + "The client secret for your Zoom app. 
You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + airbyte_secret: true + authorization_endpoint: + type: "string" + order: 3 + default: "https://zoom.us/oauth/token" + source-delighted: + title: "Delighted Spec" + type: "object" + required: + - "since" + - "api_key" + - "sourceType" + properties: + api_key: + title: "Delighted API Key" + type: "string" + description: "A Delighted API key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + since: + title: "Replication Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + examples: + - "2022-05-30T04:50:23Z" + - "2022-05-30 04:50:23" + pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" + order: 1 + format: "date-time" + sourceType: + title: "delighted" + const: "delighted" + enum: + - "delighted" + order: 0 + type: "string" + source-delighted-update: + title: "Delighted Spec" + type: "object" + required: + - "since" + - "api_key" + properties: + api_key: + title: "Delighted API Key" + type: "string" + description: "A Delighted API key." 
+ airbyte_secret: true + order: 0 + since: + title: "Replication Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + examples: + - "2022-05-30T04:50:23Z" + - "2022-05-30 04:50:23" + pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" + order: 1 + format: "date-time" + source-klarna: + title: "Klarna Spec" + type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + - "sourceType" + properties: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "na" + - "oc" + description: + "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'na', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: + "Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: + "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: + "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "klarna" + const: "klarna" + enum: + - "klarna" + order: 0 + type: "string" + source-klarna-update: + title: "Klarna Spec" + type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + properties: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "na" + - "oc" + description: + "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'na', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: + 
"Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: + "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: + "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + source-typeform: + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Typeform account and then generate a personal\ + \ Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Typeform\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + format: "date-time" + form_ids: + title: "Form IDs to replicate" + description: + "When this parameter is set, the connector will replicate data\ + \ only from the input forms. Otherwise, all forms in your Typeform account\ + \ will be replicated. You can find form IDs in your form URLs. For example,\ + \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\ + \ u6nXL7. You can find form URLs on Share panel" + type: "array" + items: + type: "string" + uniqueItems: true + order: 3 + sourceType: + title: "typeform" + const: "typeform" + enum: + - "typeform" + order: 0 + type: "string" + source-typeform-update: + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." 
+ airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Typeform account and then generate a personal\ + \ Access Token." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Typeform\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + format: "date-time" + form_ids: + title: "Form IDs to replicate" + description: + "When this parameter is set, the connector will replicate data\ + \ only from the input forms. Otherwise, all forms in your Typeform account\ + \ will be replicated. You can find form IDs in your form URLs. For example,\ + \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\ + \ u6nXL7. 
You can find form URLs on Share panel" + type: "array" + items: + type: "string" + uniqueItems: true + order: 3 + source-dremio: + title: "Dremio Spec" + type: "object" + required: + - "api_key" + - "base_url" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API Key that is generated when you authenticate to Dremio\ + \ API" + airbyte_secret: true + x-speakeasy-param-sensitive: true + base_url: + type: "string" + description: "URL of your Dremio instance" + default: "https://app.dremio.cloud" + sourceType: + title: "dremio" + const: "dremio" + enum: + - "dremio" + order: 0 + type: "string" + source-dremio-update: + title: "Dremio Spec" + type: "object" + required: + - "api_key" + - "base_url" + properties: + api_key: + type: "string" + description: + "API Key that is generated when you authenticate to Dremio\ + \ API" + airbyte_secret: true + base_url: + type: "string" + description: "URL of your Dremio instance" + default: "https://app.dremio.cloud" + source-cimis: + type: "object" + required: + - "api_key" + - "targets_type" + - "targets" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + targets_type: + type: "string" + title: "Targets Type" + enum: + - "WSN station numbers" + - "California zip codes" + - "decimal-degree coordinates" + - "street addresses" + order: 1 + targets: + type: "array" + title: "Targets" + order: 2 + daily_data_items: + type: "array" + title: "Daily Data Items" + enum: + - "day-air-tmp-avg" + - "day-air-tmp-min" + - "day-dew-pnt" + - "day-eto" + - "day-asce-eto" + - "day-asce-etr" + - "day-precip" + - "day-rel-hum-avg" + - "day-rel-hum-max" + - "day-rel-hum-min" + - "day-soil-tmp-avg" + - "day-soil-tmp-max" + - "day-soil-tmp-min" + - "day-sol-rad-avg" + - "day-sol-rad-net" + - "day-vap-pres-max" + - "day-vap-pres-avg" + - "day-wind-ene" + - "day-wind-ese" + - 
"day-wind-nne" + - "day-wind-nnw" + - "day-wind-run" + - "day-wind-spd-avg" + - "day-wind-ssw" + - "day-wind-wnw" + - "day-wind-wsw" + order: 3 + hourly_data_items: + type: "array" + title: "Hourly Data Items" + enum: + - "hly-air-tmp" + - "hly-dew-pnt" + - "hly-eto" + - "hly-net-rad" + - "hly-asce-eto" + - "hly-asce-etr" + - "hly-precip" + - "hly-rel-hum" + - "hly-res-wind" + - "hly-soil-tmp" + - "hly-sol-rad" + - "hly-vap-pres" + - "hly-wind-dir" + - "hly-wind-spd" + order: 4 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + unit_of_measure: + type: "string" + title: "Unit of Measure" + enum: + - "E" + - "M" + order: 7 + sourceType: + title: "cimis" + const: "cimis" + enum: + - "cimis" + order: 0 + type: "string" + source-cimis-update: + type: "object" + required: + - "api_key" + - "targets_type" + - "targets" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 0 + targets_type: + type: "string" + title: "Targets Type" + enum: + - "WSN station numbers" + - "California zip codes" + - "decimal-degree coordinates" + - "street addresses" + order: 1 + targets: + type: "array" + title: "Targets" + order: 2 + daily_data_items: + type: "array" + title: "Daily Data Items" + enum: + - "day-air-tmp-avg" + - "day-air-tmp-min" + - "day-dew-pnt" + - "day-eto" + - "day-asce-eto" + - "day-asce-etr" + - "day-precip" + - "day-rel-hum-avg" + - "day-rel-hum-max" + - "day-rel-hum-min" + - "day-soil-tmp-avg" + - "day-soil-tmp-max" + - "day-soil-tmp-min" + - "day-sol-rad-avg" + - "day-sol-rad-net" + - "day-vap-pres-max" + - "day-vap-pres-avg" + - "day-wind-ene" + - "day-wind-ese" + - "day-wind-nne" + - "day-wind-nnw" + - "day-wind-run" + - 
"day-wind-spd-avg" + - "day-wind-ssw" + - "day-wind-wnw" + - "day-wind-wsw" + order: 3 + hourly_data_items: + type: "array" + title: "Hourly Data Items" + enum: + - "hly-air-tmp" + - "hly-dew-pnt" + - "hly-eto" + - "hly-net-rad" + - "hly-asce-eto" + - "hly-asce-etr" + - "hly-precip" + - "hly-rel-hum" + - "hly-res-wind" + - "hly-soil-tmp" + - "hly-sol-rad" + - "hly-vap-pres" + - "hly-wind-dir" + - "hly-wind-spd" + order: 4 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + unit_of_measure: + type: "string" + title: "Unit of Measure" + enum: + - "E" + - "M" + order: 7 + source-paypal-transaction: + type: "object" + required: + - "client_id" + - "client_secret" + - "start_date" + - "is_sandbox" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Paypal developer application." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "The Client Secret of your Paypal developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\ + \ present time." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 2 + is_sandbox: + title: "Sandbox" + description: "Determines whether to use the sandbox or production environment." 
+ type: "boolean" + default: false + dispute_start_date: + title: "Dispute Start Date Range" + description: + "Start Date parameter for the list dispute endpoint in ISO format.\ + \ This Start Date must be in range within 180 days before present time,\ + \ and requires ONLY 3 miliseconds(mandatory). If you don't use this option,\ + \ it defaults to a start date set 180 days in the past." + type: "string" + examples: + - "2021-06-11T23:59:59.000Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{3}Z$" + format: "date-time" + order: 3 + end_date: + title: "End Date" + description: + "End Date for data extraction in ISO format. This can be help you select specific range of time,\ + \ mainly for test purposes or data integrity tests. When this is not\ + \ used, now_utc() is used by the streams. This does not apply to Disputes\ + \ and Product streams." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 4 + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + time_window: + type: "integer" + title: "Number of days per request" + description: + "The number of days per request. Must be a number between 1\ + \ and 31." + default: 7 + minimum: 1 + maximum: 31 + sourceType: + title: "paypal-transaction" + const: "paypal-transaction" + enum: + - "paypal-transaction" + order: 0 + type: "string" + source-paypal-transaction-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "start_date" + - "is_sandbox" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Paypal developer application." 
+ airbyte_secret: true + order: 0 + client_secret: + type: "string" + title: "Client secret" + description: "The Client Secret of your Paypal developer application." + airbyte_secret: true + order: 1 + start_date: + title: "Start Date" + description: + "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\ + \ present time." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 2 + is_sandbox: + title: "Sandbox" + description: "Determines whether to use the sandbox or production environment." + type: "boolean" + default: false + dispute_start_date: + title: "Dispute Start Date Range" + description: + "Start Date parameter for the list dispute endpoint in ISO format.\ + \ This Start Date must be in range within 180 days before present time,\ + \ and requires ONLY 3 miliseconds(mandatory). If you don't use this option,\ + \ it defaults to a start date set 180 days in the past." + type: "string" + examples: + - "2021-06-11T23:59:59.000Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{3}Z$" + format: "date-time" + order: 3 + end_date: + title: "End Date" + description: + "End Date for data extraction in ISO format. This can be help you select specific range of time,\ + \ mainly for test purposes or data integrity tests. When this is not\ + \ used, now_utc() is used by the streams. This does not apply to Disputes\ + \ and Product streams." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 4 + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." 
+ airbyte_secret: true + time_window: + type: "integer" + title: "Number of days per request" + description: + "The number of days per request. Must be a number between 1\ + \ and 31." + default: 7 + minimum: 1 + maximum: 31 + source-lemlist: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + title": "API key" + description: "Lemlist API key," + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "lemlist" + const: "lemlist" + enum: + - "lemlist" + order: 0 + type: "string" + source-lemlist-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + title": "API key" + description: "Lemlist API key," + order: 0 + source-pexels-api: + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key from the pexels website" + airbyte_secret: true + description: + "API key is required to access pexels api, For getting your's\ + \ goto https://www.pexels.com/api/documentation and create account for\ + \ free." + order: 0 + x-speakeasy-param-sensitive: true + color: + type: "string" + title: "Specific color for the search" + description: + "Optional, Desired photo color. Supported colors red, orange,\ + \ yellow, green, turquoise, blue, violet, pink, brown, black, gray, white\ + \ or any hexidecimal color code." + examples: + - "red" + - "orange" + order: 1 + locale: + type: "string" + title: "Specific locale for the search" + description: + "Optional, The locale of the search you are performing. The\ + \ current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE'\ + \ 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR'\ + \ 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR'\ + \ 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'." 
+ examples: + - "en-US" + - "pt-BR" + order: 2 + orientation: + type: "string" + title: "Specific orientation for the search" + description: + "Optional, Desired photo orientation. The current supported\ + \ orientations are landscape, portrait or square" + examples: + - "square" + - "landscape" + order: 3 + query: + type: "string" + title: "Specific query for the search" + description: + "Optional, the search query, Example Ocean, Tigers, Pears,\ + \ etc." + examples: + - "people" + - "oceans" + order: 4 + size: + type: "string" + title: "Specific size for the search" + description: + "Optional, Minimum photo size. The current supported sizes\ + \ are large(24MP), medium(12MP) or small(4MP)." + examples: + - "large" + - "small" + order: 5 + sourceType: + title: "pexels-api" + const: "pexels-api" + enum: + - "pexels-api" + order: 0 + type: "string" + source-pexels-api-update: + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + title: "API Key from the pexels website" + airbyte_secret: true + description: + "API key is required to access pexels api, For getting your's\ + \ goto https://www.pexels.com/api/documentation and create account for\ + \ free." + order: 0 + color: + type: "string" + title: "Specific color for the search" + description: + "Optional, Desired photo color. Supported colors red, orange,\ + \ yellow, green, turquoise, blue, violet, pink, brown, black, gray, white\ + \ or any hexidecimal color code." + examples: + - "red" + - "orange" + order: 1 + locale: + type: "string" + title: "Specific locale for the search" + description: + "Optional, The locale of the search you are performing. The\ + \ current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE'\ + \ 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR'\ + \ 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR'\ + \ 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'." 
+ examples: + - "en-US" + - "pt-BR" + order: 2 + orientation: + type: "string" + title: "Specific orientation for the search" + description: + "Optional, Desired photo orientation. The current supported\ + \ orientations are landscape, portrait or square" + examples: + - "square" + - "landscape" + order: 3 + query: + type: "string" + title: "Specific query for the search" + description: + "Optional, the search query, Example Ocean, Tigers, Pears,\ + \ etc." + examples: + - "people" + - "oceans" + order: 4 + size: + type: "string" + title: "Specific size for the search" + description: + "Optional, Minimum photo size. The current supported sizes\ + \ are large(24MP), medium(12MP) or small(4MP)." + examples: + - "large" + - "small" + order: 5 + source-leadfeeder: + type: "object" + required: + - "api_token" + - "start_date" + - "sourceType" + properties: + api_token: + type: "string" + order: 0 + title: "Api Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "leadfeeder" + const: "leadfeeder" + enum: + - "leadfeeder" + order: 0 + type: "string" + source-leadfeeder-update: + type: "object" + required: + - "api_token" + - "start_date" + properties: + api_token: + type: "string" + order: 0 + title: "Api Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-glassfrog: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API key provided by Glassfrog" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "glassfrog" + const: "glassfrog" + enum: + - "glassfrog" + order: 0 + type: "string" + source-glassfrog-update: + type: 
"object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API key provided by Glassfrog" + order: 0 + source-appcues: + type: "object" + required: + - "username" + - "account_id" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + description: "Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)" + order: 2 + title: "Account ID" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "appcues" + const: "appcues" + enum: + - "appcues" + order: 0 + type: "string" + source-appcues-update: + type: "object" + required: + - "username" + - "account_id" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + account_id: + type: "string" + description: "Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)" + order: 2 + title: "Account ID" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-facebook-marketing: + title: "Source Facebook Marketing" + type: "object" + properties: + account_ids: + title: "Ad Account ID(s)" + description: + "The Facebook Ad account ID(s) to pull data from. The Ad account\ + \ ID number is in the account dropdown menu or in your browser's address\ + \ bar of your Meta Ads Manager. See the docs for more information." + order: 0 + pattern_descriptor: "The Ad Account ID must be a number." 
+ examples: + - "111111111111111" + minItems: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]+$" + uniqueItems: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’s Dashboard,\ + \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\ + \ ads_read, read_insights, business_management. Then click on \"Get\ + \ token\". See the docs for more information." + order: 1 + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + credentials: + title: "Authentication" + description: "Credentials for connecting to the Facebook Marketing API" + type: "object" + oneOf: + - title: "Authenticate via Facebook Marketing (Oauth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Facebook Marketing API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret for the Facebook Marketing API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. 
From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "access_token" + - "auth_type" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for all incremental\ + \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\ + \ will be replicated for usual streams and only last 2 years for insight\ + \ streams." + order: 2 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not setting\ + \ this option will result in always syncing the latest data." + order: 3 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + campaign_statuses: + title: "Campaign Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 4 + type: "array" + items: + title: "ValidCampaignStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + adset_statuses: + title: "AdSet Statuses" + description: + "Select the statuses you want to be loaded in the stream. 
If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 5 + type: "array" + items: + title: "ValidAdSetStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + ad_statuses: + title: "Ad Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 6 + type: "array" + items: + title: "ValidAdStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ADSET_PAUSED" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "DISAPPROVED" + - "IN_PROCESS" + - "PAUSED" + - "PENDING_BILLING_INFO" + - "PENDING_REVIEW" + - "PREAPPROVED" + - "WITH_ISSUES" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images from Ad Creative" + description: + "Set to active if you want to fetch the thumbnail_url and store\ + \ the result in thumbnail_data_url for each Ad Creative." + default: false + order: 7 + type: "boolean" + custom_insights: + title: "Custom Insights" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." + order: 8 + type: "array" + items: + title: "InsightConfig" + description: "Config for custom insights" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + level: + title: "Level" + description: "Chosen level for API" + default: "ad" + enum: + - "ad" + - "adset" + - "campaign" + - "account" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "account_currency" + - "account_id" + - "account_name" + - "action_values" + - "actions" + - "ad_click_actions" + - "ad_id" + - "ad_impression_actions" + - "ad_name" + - "adset_end" + - "adset_id" + - "adset_name" + - "age_targeting" + - "attribution_setting" + - "auction_bid" + - "auction_competitiveness" + - "auction_max_competitor_bid" + - "buying_type" + - "campaign_id" + - "campaign_name" + - "canvas_avg_view_percent" + - "canvas_avg_view_time" + - "catalog_segment_actions" + - "catalog_segment_value" + - "catalog_segment_value_mobile_purchase_roas" + - "catalog_segment_value_omni_purchase_roas" + - "catalog_segment_value_website_purchase_roas" + - "clicks" + - "conversion_rate_ranking" + - "conversion_values" + - "conversions" + - "converted_product_quantity" + - "converted_product_value" + - "cost_per_15_sec_video_view" + - "cost_per_2_sec_continuous_video_view" + - "cost_per_action_type" + - "cost_per_ad_click" + - "cost_per_conversion" + - "cost_per_dda_countby_convs" + - "cost_per_estimated_ad_recallers" + - "cost_per_inline_link_click" + - "cost_per_inline_post_engagement" + - "cost_per_one_thousand_ad_impression" + - "cost_per_outbound_click" + - "cost_per_thruplay" + - "cost_per_unique_action_type" + - "cost_per_unique_click" + - "cost_per_unique_conversion" + - "cost_per_unique_inline_link_click" + - "cost_per_unique_outbound_click" + - "cpc" + - "cpm" + - "cpp" + - "created_time" + - "creative_media_type" + - "ctr" + - "date_start" + - "date_stop" + - "dda_countby_convs" + - "dda_results" + - "engagement_rate_ranking" + - "estimated_ad_recall_rate" + - "estimated_ad_recall_rate_lower_bound" + - "estimated_ad_recall_rate_upper_bound" + - "estimated_ad_recallers" + - "estimated_ad_recallers_lower_bound" + - "estimated_ad_recallers_upper_bound" + - "frequency" + - "full_view_impressions" + - "full_view_reach" + - "gender_targeting" + - "impressions" + - "inline_link_click_ctr" + - "inline_link_clicks" + - "inline_post_engagement" + - 
"instagram_upcoming_event_reminders_set" + - "instant_experience_clicks_to_open" + - "instant_experience_clicks_to_start" + - "instant_experience_outbound_clicks" + - "interactive_component_tap" + - "labels" + - "location" + - "marketing_messages_cost_per_delivered" + - "marketing_messages_cost_per_link_btn_click" + - "marketing_messages_spend" + - "mobile_app_purchase_roas" + - "objective" + - "optimization_goal" + - "outbound_clicks" + - "outbound_clicks_ctr" + - "place_page_name" + - "purchase_roas" + - "qualifying_question_qualify_answer_rate" + - "quality_ranking" + - "reach" + - "social_spend" + - "spend" + - "total_postbacks" + - "total_postbacks_detailed" + - "total_postbacks_detailed_v4" + - "unique_actions" + - "unique_clicks" + - "unique_conversions" + - "unique_ctr" + - "unique_inline_link_click_ctr" + - "unique_inline_link_clicks" + - "unique_link_clicks_ctr" + - "unique_outbound_clicks" + - "unique_outbound_clicks_ctr" + - "unique_video_continuous_2_sec_watched_actions" + - "unique_video_view_15_sec" + - "updated_time" + - "video_15_sec_watched_actions" + - "video_30_sec_watched_actions" + - "video_avg_time_watched_actions" + - "video_continuous_2_sec_watched_actions" + - "video_p100_watched_actions" + - "video_p25_watched_actions" + - "video_p50_watched_actions" + - "video_p75_watched_actions" + - "video_p95_watched_actions" + - "video_play_actions" + - "video_play_curve_actions" + - "video_play_retention_0_to_15s_actions" + - "video_play_retention_20_to_60s_actions" + - "video_play_retention_graph_actions" + - "video_thruplay_watched_actions" + - "video_time_watched_actions" + - "website_ctr" + - "website_purchase_roas" + - "wish_bid" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + title: "ValidBreakdowns" + description: "An enumeration." 
+ enum: + - "ad_format_asset" + - "age" + - "app_id" + - "body_asset" + - "call_to_action_asset" + - "coarse_conversion_value" + - "country" + - "description_asset" + - "device_platform" + - "dma" + - "fidelity_type" + - "frequency_value" + - "gender" + - "hourly_stats_aggregated_by_advertiser_time_zone" + - "hourly_stats_aggregated_by_audience_time_zone" + - "hsid" + - "image_asset" + - "impression_device" + - "is_conversion_id_modeled" + - "landing_destination" + - "link_url_asset" + - "marketing_messages_btn_name" + - "mdsa_landing_destination" + - "media_asset_url" + - "media_creator" + - "media_destination_url" + - "media_format" + - "media_origin_url" + - "media_text_content" + - "mmm" + - "place_page_id" + - "platform_position" + - "postback_sequence_index" + - "product_id" + - "publisher_platform" + - "redownload" + - "region" + - "skan_campaign_id" + - "skan_conversion_id" + - "skan_version" + - "standard_event_content_type" + - "title_asset" + - "video_asset" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + title: "ValidActionBreakdowns" + description: "An enumeration." + enum: + - "action_canvas_component_name" + - "action_carousel_card_id" + - "action_carousel_card_name" + - "action_destination" + - "action_device" + - "action_reaction" + - "action_target_id" + - "action_type" + - "action_video_sound" + - "action_video_type" + - "standard_event_content_type" + action_report_time: + title: "Action Report Time" + description: + "Determines the report time of action stats. For example,\ + \ if a person saw the ad on Jan 1st but converted on Jan 2nd, when\ + \ you query the API with action_report_time=impression, you see\ + \ a conversion on Jan 1st. When you query the API with action_report_time=conversion,\ + \ you see a conversion on Jan 2nd." 
+ default: "mixed" + enum: + - "conversion" + - "impression" + - "mixed" + type: "string" + time_increment: + title: "Time Increment" + description: + "Time window in days by which to aggregate statistics.\ + \ The sync will be chunked into N day intervals, where N is the\ + \ number of days you specified. For example, if you set this value\ + \ to 7, then all statistics will be reported as 7-day aggregates\ + \ by starting from the start_date. If the start and end dates are\ + \ October 1st and October 30th, then the connector will output 5\ + \ records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days\ + \ only). The minimum allowed value for this field is 1, and the\ + \ maximum is 89." + default: 1 + maximum: 89 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not\ + \ setting this option will result in always syncing the latest data." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + insights_lookback_window: + title: "Custom Insights Lookback Window" + description: "The attribution window" + default: 28 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Custom Insights Job Timeout" + description: "The insights job timeout" + default: 60 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + page_size: + title: "Page Size of Requests" + description: + "Page size used when sending requests to Facebook API to specify\ + \ number of records per page when response has pagination. Most users\ + \ do not need to set this field unless they specifically need to tune\ + \ the connector to address specific issues or use cases." + default: 100 + order: 10 + exclusiveMinimum: 0 + type: "integer" + insights_lookback_window: + title: "Insights Lookback Window" + description: + "The attribution window. Facebook freezes insight data 28 days\ + \ after it was generated, which means that all data from the past 28 days\ + \ may have changed since we last emitted it, so you can retrieve refreshed\ + \ insights from the past by setting this parameter. If you set a custom\ + \ lookback window value in Facebook account, please provide the same value\ + \ here." + default: 28 + order: 11 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Insights Job Timeout" + description: + "Insights Job Timeout establishes the maximum amount of time\ + \ (in minutes) of waiting for the report job to complete. When timeout\ + \ is reached the job is considered failed and we are trying to request\ + \ smaller amount of data by breaking the job to few smaller ones. 
If you\ + \ definitely know that 60 minutes is not enough for your report to be\ + \ processed then you can decrease the timeout value, so we start breaking\ + \ job to smaller parts faster." + default: 60 + order: 12 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + sourceType: + title: "facebook-marketing" + const: "facebook-marketing" + enum: + - "facebook-marketing" + order: 0 + type: "string" + required: + - "account_ids" + - "credentials" + - "sourceType" + source-facebook-marketing-update: + title: "Source Facebook Marketing" + type: "object" + properties: + account_ids: + title: "Ad Account ID(s)" + description: + "The Facebook Ad account ID(s) to pull data from. The Ad account\ + \ ID number is in the account dropdown menu or in your browser's address\ + \ bar of your Meta Ads Manager. See the docs for more information." + order: 0 + pattern_descriptor: "The Ad Account ID must be a number." + examples: + - "111111111111111" + minItems: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]+$" + uniqueItems: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’s Dashboard,\ + \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\ + \ ads_read, read_insights, business_management. Then click on \"Get\ + \ token\". See the docs for more information." 
+ order: 1 + airbyte_secret: true + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Facebook Marketing API" + type: "object" + oneOf: + - title: "Authenticate via Facebook Marketing (Oauth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Facebook Marketing API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Facebook Marketing API" + airbyte_secret: true + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + required: + - "client_id" + - "client_secret" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + required: + - "access_token" + - "auth_type" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for all incremental\ + \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\ + \ will be replicated for usual streams and only last 2 years for insight\ + \ streams." 
+ order: 2 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not setting\ + \ this option will result in always syncing the latest data." + order: 3 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + campaign_statuses: + title: "Campaign Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 4 + type: "array" + items: + title: "ValidCampaignStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + adset_statuses: + title: "AdSet Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 5 + type: "array" + items: + title: "ValidAdSetStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + ad_statuses: + title: "Ad Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 6 + type: "array" + items: + title: "ValidAdStatuses" + description: "An enumeration." 
+ enum: + - "ACTIVE" + - "ADSET_PAUSED" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "DISAPPROVED" + - "IN_PROCESS" + - "PAUSED" + - "PENDING_BILLING_INFO" + - "PENDING_REVIEW" + - "PREAPPROVED" + - "WITH_ISSUES" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images from Ad Creative" + description: + "Set to active if you want to fetch the thumbnail_url and store\ + \ the result in thumbnail_data_url for each Ad Creative." + default: false + order: 7 + type: "boolean" + custom_insights: + title: "Custom Insights" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." + order: 8 + type: "array" + items: + title: "InsightConfig" + description: "Config for custom insights" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + level: + title: "Level" + description: "Chosen level for API" + default: "ad" + enum: + - "ad" + - "adset" + - "campaign" + - "account" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "account_currency" + - "account_id" + - "account_name" + - "action_values" + - "actions" + - "ad_click_actions" + - "ad_id" + - "ad_impression_actions" + - "ad_name" + - "adset_end" + - "adset_id" + - "adset_name" + - "age_targeting" + - "attribution_setting" + - "auction_bid" + - "auction_competitiveness" + - "auction_max_competitor_bid" + - "buying_type" + - "campaign_id" + - "campaign_name" + - "canvas_avg_view_percent" + - "canvas_avg_view_time" + - "catalog_segment_actions" + - "catalog_segment_value" + - "catalog_segment_value_mobile_purchase_roas" + - "catalog_segment_value_omni_purchase_roas" + - "catalog_segment_value_website_purchase_roas" + - "clicks" + - "conversion_rate_ranking" + - "conversion_values" + - "conversions" + - "converted_product_quantity" + - "converted_product_value" + - "cost_per_15_sec_video_view" + - "cost_per_2_sec_continuous_video_view" + - "cost_per_action_type" + - "cost_per_ad_click" + - "cost_per_conversion" + - "cost_per_dda_countby_convs" + - "cost_per_estimated_ad_recallers" + - "cost_per_inline_link_click" + - "cost_per_inline_post_engagement" + - "cost_per_one_thousand_ad_impression" + - "cost_per_outbound_click" + - "cost_per_thruplay" + - "cost_per_unique_action_type" + - "cost_per_unique_click" + - "cost_per_unique_conversion" + - "cost_per_unique_inline_link_click" + - "cost_per_unique_outbound_click" + - "cpc" + - "cpm" + - "cpp" + - "created_time" + - "creative_media_type" + - "ctr" + - "date_start" + - "date_stop" + - "dda_countby_convs" + - "dda_results" + - "engagement_rate_ranking" + - "estimated_ad_recall_rate" + - "estimated_ad_recall_rate_lower_bound" + - "estimated_ad_recall_rate_upper_bound" + - "estimated_ad_recallers" + - "estimated_ad_recallers_lower_bound" + - "estimated_ad_recallers_upper_bound" + - "frequency" + - "full_view_impressions" + - "full_view_reach" + - "gender_targeting" + - "impressions" + - "inline_link_click_ctr" + - "inline_link_clicks" + - "inline_post_engagement" + - 
"instagram_upcoming_event_reminders_set" + - "instant_experience_clicks_to_open" + - "instant_experience_clicks_to_start" + - "instant_experience_outbound_clicks" + - "interactive_component_tap" + - "labels" + - "location" + - "marketing_messages_cost_per_delivered" + - "marketing_messages_cost_per_link_btn_click" + - "marketing_messages_spend" + - "mobile_app_purchase_roas" + - "objective" + - "optimization_goal" + - "outbound_clicks" + - "outbound_clicks_ctr" + - "place_page_name" + - "purchase_roas" + - "qualifying_question_qualify_answer_rate" + - "quality_ranking" + - "reach" + - "social_spend" + - "spend" + - "total_postbacks" + - "total_postbacks_detailed" + - "total_postbacks_detailed_v4" + - "unique_actions" + - "unique_clicks" + - "unique_conversions" + - "unique_ctr" + - "unique_inline_link_click_ctr" + - "unique_inline_link_clicks" + - "unique_link_clicks_ctr" + - "unique_outbound_clicks" + - "unique_outbound_clicks_ctr" + - "unique_video_continuous_2_sec_watched_actions" + - "unique_video_view_15_sec" + - "updated_time" + - "video_15_sec_watched_actions" + - "video_30_sec_watched_actions" + - "video_avg_time_watched_actions" + - "video_continuous_2_sec_watched_actions" + - "video_p100_watched_actions" + - "video_p25_watched_actions" + - "video_p50_watched_actions" + - "video_p75_watched_actions" + - "video_p95_watched_actions" + - "video_play_actions" + - "video_play_curve_actions" + - "video_play_retention_0_to_15s_actions" + - "video_play_retention_20_to_60s_actions" + - "video_play_retention_graph_actions" + - "video_thruplay_watched_actions" + - "video_time_watched_actions" + - "website_ctr" + - "website_purchase_roas" + - "wish_bid" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + title: "ValidBreakdowns" + description: "An enumeration." 
+ enum: + - "ad_format_asset" + - "age" + - "app_id" + - "body_asset" + - "call_to_action_asset" + - "coarse_conversion_value" + - "country" + - "description_asset" + - "device_platform" + - "dma" + - "fidelity_type" + - "frequency_value" + - "gender" + - "hourly_stats_aggregated_by_advertiser_time_zone" + - "hourly_stats_aggregated_by_audience_time_zone" + - "hsid" + - "image_asset" + - "impression_device" + - "is_conversion_id_modeled" + - "landing_destination" + - "link_url_asset" + - "marketing_messages_btn_name" + - "mdsa_landing_destination" + - "media_asset_url" + - "media_creator" + - "media_destination_url" + - "media_format" + - "media_origin_url" + - "media_text_content" + - "mmm" + - "place_page_id" + - "platform_position" + - "postback_sequence_index" + - "product_id" + - "publisher_platform" + - "redownload" + - "region" + - "skan_campaign_id" + - "skan_conversion_id" + - "skan_version" + - "standard_event_content_type" + - "title_asset" + - "video_asset" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + title: "ValidActionBreakdowns" + description: "An enumeration." + enum: + - "action_canvas_component_name" + - "action_carousel_card_id" + - "action_carousel_card_name" + - "action_destination" + - "action_device" + - "action_reaction" + - "action_target_id" + - "action_type" + - "action_video_sound" + - "action_video_type" + - "standard_event_content_type" + action_report_time: + title: "Action Report Time" + description: + "Determines the report time of action stats. For example,\ + \ if a person saw the ad on Jan 1st but converted on Jan 2nd, when\ + \ you query the API with action_report_time=impression, you see\ + \ a conversion on Jan 1st. When you query the API with action_report_time=conversion,\ + \ you see a conversion on Jan 2nd." 
+ default: "mixed" + enum: + - "conversion" + - "impression" + - "mixed" + type: "string" + time_increment: + title: "Time Increment" + description: + "Time window in days by which to aggregate statistics.\ + \ The sync will be chunked into N day intervals, where N is the\ + \ number of days you specified. For example, if you set this value\ + \ to 7, then all statistics will be reported as 7-day aggregates\ + \ by starting from the start_date. If the start and end dates are\ + \ October 1st and October 30th, then the connector will output 5\ + \ records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days\ + \ only). The minimum allowed value for this field is 1, and the\ + \ maximum is 89." + default: 1 + maximum: 89 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not\ + \ setting this option will result in always syncing the latest data." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + insights_lookback_window: + title: "Custom Insights Lookback Window" + description: "The attribution window" + default: 28 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Custom Insights Job Timeout" + description: "The insights job timeout" + default: 60 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + page_size: + title: "Page Size of Requests" + description: + "Page size used when sending requests to Facebook API to specify\ + \ number of records per page when response has pagination. Most users\ + \ do not need to set this field unless they specifically need to tune\ + \ the connector to address specific issues or use cases." + default: 100 + order: 10 + exclusiveMinimum: 0 + type: "integer" + insights_lookback_window: + title: "Insights Lookback Window" + description: + "The attribution window. Facebook freezes insight data 28 days\ + \ after it was generated, which means that all data from the past 28 days\ + \ may have changed since we last emitted it, so you can retrieve refreshed\ + \ insights from the past by setting this parameter. If you set a custom\ + \ lookback window value in Facebook account, please provide the same value\ + \ here." + default: 28 + order: 11 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Insights Job Timeout" + description: + "Insights Job Timeout establishes the maximum amount of time\ + \ (in minutes) of waiting for the report job to complete. When timeout\ + \ is reached the job is considered failed and we are trying to request\ + \ smaller amount of data by breaking the job to few smaller ones. 
If you\ + \ definitely know that 60 minutes is not enough for your report to be\ + \ processed then you can decrease the timeout value, so we start breaking\ + \ job to smaller parts faster." + default: 60 + order: 12 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "account_ids" + - "credentials" + source-recruitee: + type: "object" + required: + - "api_key" + - "company_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recruitee API Key. See here." + order: 0 + x-speakeasy-param-sensitive: true + company_id: + type: "integer" + title: "Company ID" + description: + "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." + order: 1 + sourceType: + title: "recruitee" + const: "recruitee" + enum: + - "recruitee" + order: 0 + type: "string" + source-recruitee-update: + type: "object" + required: + - "api_key" + - "company_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recruitee API Key. See here." + order: 0 + company_id: + type: "integer" + title: "Company ID" + description: + "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." 
+ order: 1 + source-airbyte: + type: "object" + required: + - "start_date" + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 1 + title: "client_id" + start_date: + type: "string" + order: 0 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_secret: + type: "string" + order: 2 + title: "client_secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "airbyte" + const: "airbyte" + enum: + - "airbyte" + order: 0 + type: "string" + source-airbyte-update: + type: "object" + required: + - "start_date" + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 1 + title: "client_id" + start_date: + type: "string" + order: 0 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_secret: + type: "string" + order: 2 + title: "client_secret" + airbyte_secret: true + source-survey-sparrow: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Your access token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + region: + type: "object" + title: "Base URL" + description: + "Is your account location is EU based? If yes, the base url\ + \ to retrieve data will be different." 
+ oneOf: + - type: "object" + title: "EU-based account" + properties: + url_base: + type: "string" + const: "https://eu-api.surveysparrow.com/v3" + enum: + - "https://eu-api.surveysparrow.com/v3" + - type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + default: + type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + order: 1 + survey_id: + type: "array" + description: "A List of your survey ids for survey-specific stream" + order: 2 + sourceType: + title: "survey-sparrow" + const: "survey-sparrow" + enum: + - "survey-sparrow" + order: 0 + type: "string" + source-survey-sparrow-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + description: + "Your access token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + region: + type: "object" + title: "Base URL" + description: + "Is your account location is EU based? If yes, the base url\ + \ to retrieve data will be different." 
+ oneOf: + - type: "object" + title: "EU-based account" + properties: + url_base: + type: "string" + const: "https://eu-api.surveysparrow.com/v3" + enum: + - "https://eu-api.surveysparrow.com/v3" + - type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + default: + type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + order: 1 + survey_id: + type: "array" + description: "A List of your survey ids for survey-specific stream" + order: 2 + source-azure-table: + title: "Azure Data Table Spec" + type: "object" + required: + - "storage_account_name" + - "storage_access_key" + - "sourceType" + properties: + storage_account_name: + title: "Account Name" + type: "string" + description: "The name of your storage account." + order: 0 + airbyte_secret: false + x-speakeasy-param-sensitive: true + storage_access_key: + title: "Access Key" + type: "string" + description: + "Azure Table Storage Access Key. See the docs for more information on how to obtain this key." + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + storage_endpoint_suffix: + title: "Endpoint Suffix" + type: "string" + description: + "Azure Table Storage service account URL suffix. 
See the docs\ + \ for more information on how to obtain endpoint suffix" + order: 2 + default: "core.windows.net" + examples: + - "core.windows.net" + - "core.chinacloudapi.cn" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "azure-table" + const: "azure-table" + enum: + - "azure-table" + order: 0 + type: "string" + source-azure-table-update: + title: "Azure Data Table Spec" + type: "object" + required: + - "storage_account_name" + - "storage_access_key" + properties: + storage_account_name: + title: "Account Name" + type: "string" + description: "The name of your storage account." + order: 0 + airbyte_secret: false + storage_access_key: + title: "Access Key" + type: "string" + description: + "Azure Table Storage Access Key. See the docs for more information on how to obtain this key." + order: 1 + airbyte_secret: true + storage_endpoint_suffix: + title: "Endpoint Suffix" + type: "string" + description: + "Azure Table Storage service account URL suffix. 
See the docs\ + \ for more information on how to obtain endpoint suffix" + order: 2 + default: "core.windows.net" + examples: + - "core.windows.net" + - "core.chinacloudapi.cn" + airbyte_secret: false + source-customer-io: + type: "object" + required: + - "app_api_key" + - "sourceType" + properties: + app_api_key: + type: "string" + title: "Customer.io App API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "customer-io" + const: "customer-io" + enum: + - "customer-io" + order: 0 + type: "string" + source-customer-io-update: + type: "object" + required: + - "app_api_key" + properties: + app_api_key: + type: "string" + title: "Customer.io App API Key" + airbyte_secret: true + order: 0 + source-surveymonkey: + type: "object" + required: + - "start_date" + - "credentials" + - "sourceType" + properties: + origin: + type: "string" + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: + "Depending on the originating datacenter of the SurveyMonkey\ + \ account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: "object" + required: + - "auth_method" + - "access_token" + order: 2 + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." 
+ airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + order: 3 + type: "string" + airbyte_secret: true + description: + "Access Token for making authenticated requests. See the\ + \ docs for information on how to generate this key." + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + order: 3 + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: "array" + order: 1000 + items: + type: "string" + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all boards to which you have access will be\ + \ replicated." + sourceType: + title: "surveymonkey" + const: "surveymonkey" + enum: + - "surveymonkey" + order: 0 + type: "string" + source-surveymonkey-update: + type: "object" + required: + - "start_date" + - "credentials" + properties: + origin: + type: "string" + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: + "Depending on the originating datacenter of the SurveyMonkey\ + \ account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: "object" + required: + - "auth_method" + - "access_token" + order: 2 + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." 
+ airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + access_token: + title: "Access Token" + order: 3 + type: "string" + airbyte_secret: true + description: + "Access Token for making authenticated requests. See the\ + \ docs for information on how to generate this key." + start_date: + title: "Start Date" + order: 3 + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: "array" + order: 1000 + items: + type: "string" + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all boards to which you have access will be\ + \ replicated." + source-persistiq: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "PersistIq API Key. See the docs for more information on where to find that key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "persistiq" + const: "persistiq" + enum: + - "persistiq" + order: 0 + type: "string" + source-persistiq-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "PersistIq API Key. See the docs for more information on where to find that key." + airbyte_secret: true + order: 0 + source-configcat: + type: "object" + required: + - "username" + - "password" + - "sourceType" + properties: + username: + type: "string" + description: + "Basic auth user name. See here." + title: "Username" + order: 0 + password: + type: "string" + description: + "Basic auth password. See here." 
+ title: "Password" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "configcat" + const: "configcat" + enum: + - "configcat" + order: 0 + type: "string" + source-configcat-update: + type: "object" + required: + - "username" + - "password" + properties: + username: + type: "string" + description: + "Basic auth user name. See here." + title: "Username" + order: 0 + password: + type: "string" + description: + "Basic auth password. See here." + title: "Password" + airbyte_secret: true + order: 1 + source-reddit: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + description: "Specifies the query for searching in reddits and subreddits" + order: 1 + title: "Query" + default: "airbyte" + include_over_18: + type: "boolean" + description: "Includes mature content" + order: 2 + title: "Include over 18 flag" + default: false + exact: + type: "boolean" + description: "Specifies exact keyword and reduces distractions" + order: 3 + title: "Exact" + limit: + type: "number" + description: "Max records per page limit" + order: 4 + title: "Limit" + default: "1000" + subreddits: + type: "array" + description: "Subreddits for exploration" + order: 5 + title: "Subreddits" + default: + - "r/funny" + - "r/AskReddit" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "reddit" + const: "reddit" + enum: + - "reddit" + order: 0 + type: "string" + source-reddit-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + query: + type: "string" + description: "Specifies the query for searching in reddits and subreddits" + order: 1 + 
title: "Query" + default: "airbyte" + include_over_18: + type: "boolean" + description: "Includes mature content" + order: 2 + title: "Include over 18 flag" + default: false + exact: + type: "boolean" + description: "Specifies exact keyword and reduces distractions" + order: 3 + title: "Exact" + limit: + type: "number" + description: "Max records per page limit" + order: 4 + title: "Limit" + default: "1000" + subreddits: + type: "array" + description: "Subreddits for exploration" + order: 5 + title: "Subreddits" + default: + - "r/funny" + - "r/AskReddit" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-insightly: + type: "object" + required: + - "start_date" + - "token" + - "sourceType" + properties: + start_date: + type: + - "string" + - "null" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Insightly\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. Note that it will be used only for incremental streams." + examples: + - "2021-03-01T00:00:00Z" + order: 0 + token: + type: + - "string" + - "null" + title: "API Token" + description: "Your Insightly API token." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "insightly" + const: "insightly" + enum: + - "insightly" + order: 0 + type: "string" + source-insightly-update: + type: "object" + required: + - "start_date" + - "token" + properties: + start_date: + type: + - "string" + - "null" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Insightly\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. 
Note that it will be used only for incremental streams." + examples: + - "2021-03-01T00:00:00Z" + order: 0 + token: + type: + - "string" + - "null" + title: "API Token" + description: "Your Insightly API token." + airbyte_secret: true + order: 1 + source-cart: + title: "Cart.com Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Central API Router" + type: "object" + order: 0 + required: + - "auth_type" + - "user_name" + - "user_secret" + - "site_id" + properties: + auth_type: + type: "string" + const: "CENTRAL_API_ROUTER" + order: 0 + enum: + - "CENTRAL_API_ROUTER" + user_name: + type: "string" + title: "User Name" + description: "Enter your application's User Name" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + user_secret: + type: "string" + title: "User Secret" + description: "Enter your application's User Secret" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + site_id: + type: "string" + title: "Site ID" + description: + "You can determine a site provisioning site Id by hitting\ + \ https://site.com/store/sitemonitor.aspx and reading the response\ + \ param PSID" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "Single Store Access Token" + type: "object" + order: 1 + required: + - "auth_type" + - "access_token" + - "store_name" + properties: + auth_type: + type: "string" + const: "SINGLE_STORE_ACCESS_TOKEN" + order: 0 + enum: + - "SINGLE_STORE_ACCESS_TOKEN" + access_token: + type: "string" + title: "Access Token" + airbyte_secret: true + order: 1 + description: "Access Token for making authenticated requests." + x-speakeasy-param-sensitive: true + store_name: + type: "string" + title: "Store Name" + order: 2 + description: + "The name of Cart.com Online Store. 
All API URLs start\ + \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\ + \ is the domain name of your store." + start_date: + title: "Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + sourceType: + title: "cart" + const: "cart" + enum: + - "cart" + order: 0 + type: "string" + source-cart-update: + title: "Cart.com Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Central API Router" + type: "object" + order: 0 + required: + - "auth_type" + - "user_name" + - "user_secret" + - "site_id" + properties: + auth_type: + type: "string" + const: "CENTRAL_API_ROUTER" + order: 0 + enum: + - "CENTRAL_API_ROUTER" + user_name: + type: "string" + title: "User Name" + description: "Enter your application's User Name" + airbyte_secret: true + order: 1 + user_secret: + type: "string" + title: "User Secret" + description: "Enter your application's User Secret" + airbyte_secret: true + order: 2 + site_id: + type: "string" + title: "Site ID" + description: + "You can determine a site provisioning site Id by hitting\ + \ https://site.com/store/sitemonitor.aspx and reading the response\ + \ param PSID" + airbyte_secret: true + order: 3 + - title: "Single Store Access Token" + type: "object" + order: 1 + required: + - "auth_type" + - "access_token" + - "store_name" + properties: + auth_type: + type: "string" + const: "SINGLE_STORE_ACCESS_TOKEN" + order: 0 + enum: + - "SINGLE_STORE_ACCESS_TOKEN" + access_token: + type: "string" + title: "Access Token" + airbyte_secret: true + order: 1 + description: "Access Token for making authenticated requests." + store_name: + type: "string" + title: "Store Name" + order: 2 + description: + "The name of Cart.com Online Store. 
All API URLs start\ + \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\ + \ is the domain name of your store." + start_date: + title: "Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + source-oracle: + title: "Oracle Source Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sourceType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 1 + port: + title: "Port" + description: + "Port of the database.\nOracle Corporations recommends the\ + \ following port numbers:\n1521 - Default listening port for client connections\ + \ to the listener. \n2484 - Recommended and officially registered listening\ + \ port for client connections to the listener using TCP/IP with SSL" + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + order: 2 + connection_data: + title: "Connect by" + type: "object" + description: "Connect data that will be used for DB connection" + order: 3 + oneOf: + - title: "Service name" + description: "Use service name" + required: + - "service_name" + properties: + connection_type: + type: "string" + const: "service_name" + order: 0 + enum: + - "service_name" + service_name: + title: "Service name" + type: "string" + order: 1 + - title: "System ID (SID)" + description: "Use SID (Oracle System Identifier)" + required: + - "sid" + properties: + connection_type: + type: "string" + const: "sid" + order: 0 + enum: + - "sid" + sid: + title: "System ID (SID)" + type: "string" + order: 1 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 1 + uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 8 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines what encryption algorithm is\ + \ used." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server."
+ required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM File" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "oracle" + const: "oracle" + enum: + - "oracle" + order: 0 + type: "string" + source-oracle-update: + title: "Oracle Source Spec" + type: "object" + required: + - "host" + - "port" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 1 + port: + title: "Port" + description: + "Port of the database.\nOracle Corporations recommends the\ + \ following port numbers:\n1521 - Default listening port for client connections\ + \ to the listener. 
\n2484 - Recommended and officially registered listening\ + \ port for client connections to the listener using TCP/IP with SSL" + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + order: 2 + connection_data: + title: "Connect by" + type: "object" + description: "Connect data that will be used for DB connection" + order: 3 + oneOf: + - title: "Service name" + description: "Use service name" + required: + - "service_name" + properties: + connection_type: + type: "string" + const: "service_name" + order: 0 + enum: + - "service_name" + service_name: + title: "Service name" + type: "string" + order: 1 + - title: "System ID (SID)" + description: "Use SID (Oracle System Identifier)" + required: + - "sid" + properties: + connection_type: + type: "string" + const: "sid" + order: 0 + enum: + - "sid" + sid: + title: "System ID (SID)" + type: "string" + order: 1 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 1 + uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 8 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted."
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines what encryption algorithm is\ + \ used." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM File" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + order: 4 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-appfollow: + type: "object" + required: + - "sourceType" + properties: + api_secret: + type: "string" + description: "API Key provided by Appfollow" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "appfollow" + const: "appfollow" + enum: + - "appfollow" + order: 0 + type: "string" + source-appfollow-update: + type: "object" + required: [] + properties: + api_secret: + type: "string" + description: "API Key provided by Appfollow" + title: "API Key" + airbyte_secret: true + order: 0 + source-chartmogul: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your Chartmogul API key. See the docs for info on how to obtain this." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. When\ + \ feasible, any data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + order: 1 + format: "date-time" + sourceType: + title: "chartmogul" + const: "chartmogul" + enum: + - "chartmogul" + order: 0 + type: "string" + source-chartmogul-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your Chartmogul API key. See the docs for info on how to obtain this." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. When\ + \ feasible, any data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + order: 1 + format: "date-time" + source-coinmarketcap: + type: "object" + required: + - "api_key" + - "data_type" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Your API Key. See here. The token is case sensitive." + order: 0 + x-speakeasy-param-sensitive: true + data_type: + type: "string" + title: "Data type" + enum: + - "latest" + - "historical" + description: + "/latest: Latest market ticker quotes and averages for cryptocurrencies\ + \ and exchanges. /historical: Intervals of historic market data like OHLCV\ + \ data or data for use in charting libraries. See here." + order: 1 + symbols: + type: "array" + title: "Symbol" + items: + type: "string" + description: "Cryptocurrency symbols. (only used for quotes stream)" + minItems: 1 + examples: + - "AVAX" + - "BTC" + order: 2 + sourceType: + title: "coinmarketcap" + const: "coinmarketcap" + enum: + - "coinmarketcap" + order: 0 + type: "string" + source-coinmarketcap-update: + type: "object" + required: + - "api_key" + - "data_type" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Your API Key. See here. The token is case sensitive." 
+ order: 0 + data_type: + type: "string" + title: "Data type" + enum: + - "latest" + - "historical" + description: + "/latest: Latest market ticker quotes and averages for cryptocurrencies\ + \ and exchanges. /historical: Intervals of historic market data like OHLCV\ + \ data or data for use in charting libraries. See here." + order: 1 + symbols: + type: "array" + title: "Symbol" + items: + type: "string" + description: "Cryptocurrency symbols. (only used for quotes stream)" + minItems: 1 + examples: + - "AVAX" + - "BTC" + order: 2 + source-dixa: + type: "object" + required: + - "api_token" + - "start_date" + - "sourceType" + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The connector pulls records updated from this date onwards." + examples: + - "YYYY-MM-DD" + order: 3 + sourceType: + title: "dixa" + const: "dixa" + enum: + - "dixa" + order: 0 + type: "string" + source-dixa-update: + type: "object" + required: + - "api_token" + - "start_date" + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + order: 1 + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The connector pulls records updated from this date onwards." 
+ examples: + - "YYYY-MM-DD" + order: 3 + source-freshcaller: + title: "Freshcaller Spec" + type: "object" + required: + - "domain" + - "api_key" + - "sourceType" + properties: + domain: + type: "string" + title: "Domain for Freshcaller account" + description: "Used to construct Base URL for the Freshcaller APIs" + examples: + - "snaptravel" + api_key: + type: "string" + title: "API Key" + description: + "Freshcaller API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + description: + "UTC date and time. Any data created after this date will be\ + \ replicated." + format: "date-time" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-01-01T12:00:00Z" + sync_lag_minutes: + title: "Lag in minutes for each sync" + type: "integer" + description: + "Lag in minutes for each sync, i.e., at time T, data for the\ + \ time range [prev_sync_time, T-30] will be fetched" + sourceType: + title: "freshcaller" + const: "freshcaller" + enum: + - "freshcaller" + order: 0 + type: "string" + source-freshcaller-update: + title: "Freshcaller Spec" + type: "object" + required: + - "domain" + - "api_key" + properties: + domain: + type: "string" + title: "Domain for Freshcaller account" + description: "Used to construct Base URL for the Freshcaller APIs" + examples: + - "snaptravel" + api_key: + type: "string" + title: "API Key" + description: + "Freshcaller API Key. See the docs for more information on how to obtain this key." 
+ airbyte_secret: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + description: + "UTC date and time. Any data created after this date will be\ + \ replicated." + format: "date-time" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-01-01T12:00:00Z" + sync_lag_minutes: + title: "Lag in minutes for each sync" + type: "integer" + description: + "Lag in minutes for each sync, i.e., at time T, data for the\ + \ time range [prev_sync_time, T-30] will be fetched" + source-recharge: + title: "Recharge Spec" + type: "object" + required: + - "start_date" + - "access_token" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Recharge\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will\ + \ not be replicated." + examples: + - "2021-05-14T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the Access Token generated. See the docs for\ + \ more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + use_orders_deprecated_api: + type: "boolean" + title: "Use `Orders` Deprecated API" + description: + "Define whether or not the `Orders` stream should use the deprecated\ + \ `2021-01` API version, or use `2021-11`, otherwise." 
+ default: true + sourceType: + title: "recharge" + const: "recharge" + enum: + - "recharge" + order: 0 + type: "string" + source-recharge-update: + title: "Recharge Spec" + type: "object" + required: + - "start_date" + - "access_token" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Recharge\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will\ + \ not be replicated." + examples: + - "2021-05-14T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the Access Token generated. See the docs for\ + \ more information." + airbyte_secret: true + use_orders_deprecated_api: + type: "boolean" + title: "Use `Orders` Deprecated API" + description: + "Define whether or not the `Orders` stream should use the deprecated\ + \ `2021-01` API version, or use `2021-11`, otherwise." 
+ default: true + source-aha: + type: "object" + required: + - "api_key" + - "url" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Bearer Token" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + order: 1 + sourceType: + title: "aha" + const: "aha" + enum: + - "aha" + order: 0 + type: "string" + source-aha-update: + type: "object" + required: + - "api_key" + - "url" + properties: + api_key: + type: "string" + title: "API Bearer Token" + airbyte_secret: true + description: "API Key" + order: 0 + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + order: 1 + source-brevo: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "brevo" + const: "brevo" + enum: + - "brevo" + order: 0 + type: "string" + source-brevo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-datascope: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "Authorization" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} 
[0-9]{2}:[0-9]{2}$" + order: 1 + sourceType: + title: "datascope" + const: "datascope" + enum: + - "datascope" + order: 0 + type: "string" + source-datascope-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "Authorization" + airbyte_secret: true + description: "API Key" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}$" + order: 1 + source-metabase: + type: "object" + required: + - "instance_api_url" + - "username" + - "sourceType" + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "https://localhost:3000/api/" + pattern: "^https://" + order: 0 + username: + type: "string" + title: "Username" + order: 1 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + session_token: + type: "string" + description: + "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." 
+ airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + sourceType: + title: "metabase" + const: "metabase" + enum: + - "metabase" + order: 0 + type: "string" + source-metabase-update: + type: "object" + required: + - "instance_api_url" + - "username" + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "https://localhost:3000/api/" + pattern: "^https://" + order: 0 + username: + type: "string" + title: "Username" + order: 1 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 2 + session_token: + type: "string" + description: + "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." + airbyte_secret: true + order: 3 + source-bing-ads: + title: "Bing Ads Spec" + type: "object" + required: + - "developer_token" + - "client_id" + - "refresh_token" + - "sourceType" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + tenant_id: + type: "string" + title: "Tenant ID" + description: + "The Tenant ID of your Microsoft Advertising developer application.\ + \ Set this to \"common\" unless you know you need a different value." + airbyte_secret: true + default: "common" + order: 0 + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + developer_token: + type: "string" + title: "Developer Token" + description: + "Developer token associated with user. See more info in the docs." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + account_names: + title: "Account Names Predicates" + description: "Predicates that will be used to sync data by specific accounts." + type: "array" + order: 5 + items: + description: "Account Names Predicates Config." + type: "object" + properties: + operator: + title: "Operator" + description: + "An Operator that will be used to filter accounts. The\ + \ Contains predicate has features for matching words, matching inflectional\ + \ forms of words, searching using wildcard characters, and searching\ + \ using proximity. The Equals is used to return all rows where account\ + \ name is equal(=) to the string that you provided" + type: "string" + enum: + - "Contains" + - "Equals" + name: + title: "Account Name" + description: + "Account Name is a string value for comparing with the\ + \ specified predicate." + type: "string" + required: + - "operator" + - "name" + reports_start_date: + type: "string" + title: "Reports replication start date" + format: "date" + description: + "The start date from which to begin replicating report data.\ + \ Any data generated before this date will not be replicated in reports.\ + \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\ + \ and current calendar year will be replicated." 
+ order: 6 + lookback_window: + title: "Lookback window" + description: + "Also known as attribution or conversion window. How far into\ + \ the past to look for records (in days). If your conversion window has\ + \ an hours/minutes granularity, round it up to the number of days exceeding.\ + \ Used only for performance report streams in incremental mode without\ + \ specified Reports Start Date." + type: "integer" + default: 0 + minimum: 0 + maximum: 90 + order: 7 + custom_reports: + title: "Custom Reports" + description: "You can add your Custom Bing Ads report by creating one." + order: 8 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Report Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + examples: + - "Account Performance" + - "AdDynamicTextPerformanceReport" + - "custom report" + reporting_object: + title: "Reporting Data Object" + description: + "The name of the the object derives from the ReportRequest\ + \ object. You can find it in Bing Ads Api docs - Reporting API -\ + \ Reporting Data Objects." 
+ type: "string" + enum: + - "AccountPerformanceReportRequest" + - "AdDynamicTextPerformanceReportRequest" + - "AdExtensionByAdReportRequest" + - "AdExtensionByKeywordReportRequest" + - "AdExtensionDetailReportRequest" + - "AdGroupPerformanceReportRequest" + - "AdPerformanceReportRequest" + - "AgeGenderAudienceReportRequest" + - "AudiencePerformanceReportRequest" + - "CallDetailReportRequest" + - "CampaignPerformanceReportRequest" + - "ConversionPerformanceReportRequest" + - "DestinationUrlPerformanceReportRequest" + - "DSAAutoTargetPerformanceReportRequest" + - "DSACategoryPerformanceReportRequest" + - "DSASearchQueryPerformanceReportRequest" + - "GeographicPerformanceReportRequest" + - "GoalsAndFunnelsReportRequest" + - "HotelDimensionPerformanceReportRequest" + - "HotelGroupPerformanceReportRequest" + - "KeywordPerformanceReportRequest" + - "NegativeKeywordConflictReportRequest" + - "ProductDimensionPerformanceReportRequest" + - "ProductMatchCountReportRequest" + - "ProductNegativeKeywordConflictReportRequest" + - "ProductPartitionPerformanceReportRequest" + - "ProductPartitionUnitPerformanceReportRequest" + - "ProductSearchQueryPerformanceReportRequest" + - "ProfessionalDemographicsAudienceReportRequest" + - "PublisherUsagePerformanceReportRequest" + - "SearchCampaignChangeHistoryReportRequest" + - "SearchQueryPerformanceReportRequest" + - "ShareOfVoiceReportRequest" + - "UserLocationPerformanceReportRequest" + report_columns: + title: "Columns" + description: + "A list of available report object columns. You can find\ + \ it in description of reporting object that you want to add to\ + \ custom report." + type: "array" + items: + description: "Name of report column." + type: "string" + minItems: 1 + report_aggregation: + title: "Aggregation" + description: "A list of available aggregations." + type: "string" + items: + title: "ValidEnums" + description: "An enumeration of aggregations." 
+ enum: + - "Hourly" + - "Daily" + - "Weekly" + - "Monthly" + - "DayOfWeek" + - "HourOfDay" + - "WeeklyStartingMonday" + - "Summary" + default: + - "Hourly" + required: + - "name" + - "reporting_object" + - "report_columns" + - "report_aggregation" + sourceType: + title: "bing-ads" + const: "bing-ads" + enum: + - "bing-ads" + order: 0 + type: "string" + source-bing-ads-update: + title: "Bing Ads Spec" + type: "object" + required: + - "developer_token" + - "client_id" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + tenant_id: + type: "string" + title: "Tenant ID" + description: + "The Tenant ID of your Microsoft Advertising developer application.\ + \ Set this to \"common\" unless you know you need a different value." + airbyte_secret: true + default: "common" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 3 + developer_token: + type: "string" + title: "Developer Token" + description: + "Developer token associated with user. See more info in the docs." + airbyte_secret: true + order: 4 + account_names: + title: "Account Names Predicates" + description: "Predicates that will be used to sync data by specific accounts." + type: "array" + order: 5 + items: + description: "Account Names Predicates Config." + type: "object" + properties: + operator: + title: "Operator" + description: + "An Operator that will be used to filter accounts. 
The\ + \ Contains predicate has features for matching words, matching inflectional\ + \ forms of words, searching using wildcard characters, and searching\ + \ using proximity. The Equals is used to return all rows where account\ + \ name is equal(=) to the string that you provided" + type: "string" + enum: + - "Contains" + - "Equals" + name: + title: "Account Name" + description: + "Account Name is a string value for comparing with the\ + \ specified predicate." + type: "string" + required: + - "operator" + - "name" + reports_start_date: + type: "string" + title: "Reports replication start date" + format: "date" + description: + "The start date from which to begin replicating report data.\ + \ Any data generated before this date will not be replicated in reports.\ + \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\ + \ and current calendar year will be replicated." + order: 6 + lookback_window: + title: "Lookback window" + description: + "Also known as attribution or conversion window. How far into\ + \ the past to look for records (in days). If your conversion window has\ + \ an hours/minutes granularity, round it up to the number of days exceeding.\ + \ Used only for performance report streams in incremental mode without\ + \ specified Reports Start Date." + type: "integer" + default: 0 + minimum: 0 + maximum: 90 + order: 7 + custom_reports: + title: "Custom Reports" + description: "You can add your Custom Bing Ads report by creating one." + order: 8 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Report Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + examples: + - "Account Performance" + - "AdDynamicTextPerformanceReport" + - "custom report" + reporting_object: + title: "Reporting Data Object" + description: + "The name of the the object derives from the ReportRequest\ + \ object. 
You can find it in Bing Ads Api docs - Reporting API -\ + \ Reporting Data Objects." + type: "string" + enum: + - "AccountPerformanceReportRequest" + - "AdDynamicTextPerformanceReportRequest" + - "AdExtensionByAdReportRequest" + - "AdExtensionByKeywordReportRequest" + - "AdExtensionDetailReportRequest" + - "AdGroupPerformanceReportRequest" + - "AdPerformanceReportRequest" + - "AgeGenderAudienceReportRequest" + - "AudiencePerformanceReportRequest" + - "CallDetailReportRequest" + - "CampaignPerformanceReportRequest" + - "ConversionPerformanceReportRequest" + - "DestinationUrlPerformanceReportRequest" + - "DSAAutoTargetPerformanceReportRequest" + - "DSACategoryPerformanceReportRequest" + - "DSASearchQueryPerformanceReportRequest" + - "GeographicPerformanceReportRequest" + - "GoalsAndFunnelsReportRequest" + - "HotelDimensionPerformanceReportRequest" + - "HotelGroupPerformanceReportRequest" + - "KeywordPerformanceReportRequest" + - "NegativeKeywordConflictReportRequest" + - "ProductDimensionPerformanceReportRequest" + - "ProductMatchCountReportRequest" + - "ProductNegativeKeywordConflictReportRequest" + - "ProductPartitionPerformanceReportRequest" + - "ProductPartitionUnitPerformanceReportRequest" + - "ProductSearchQueryPerformanceReportRequest" + - "ProfessionalDemographicsAudienceReportRequest" + - "PublisherUsagePerformanceReportRequest" + - "SearchCampaignChangeHistoryReportRequest" + - "SearchQueryPerformanceReportRequest" + - "ShareOfVoiceReportRequest" + - "UserLocationPerformanceReportRequest" + report_columns: + title: "Columns" + description: + "A list of available report object columns. You can find\ + \ it in description of reporting object that you want to add to\ + \ custom report." + type: "array" + items: + description: "Name of report column." + type: "string" + minItems: 1 + report_aggregation: + title: "Aggregation" + description: "A list of available aggregations." 
+ type: "string" + items: + title: "ValidEnums" + description: "An enumeration of aggregations." + enum: + - "Hourly" + - "Daily" + - "Weekly" + - "Monthly" + - "DayOfWeek" + - "HourOfDay" + - "WeeklyStartingMonday" + - "Summary" + default: + - "Hourly" + required: + - "name" + - "reporting_object" + - "report_columns" + - "report_aggregation" + source-monday: + title: "Monday Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + subdomain: + type: "string" + title: "Subdomain/Slug" + description: + "Slug/subdomain of the account, or the first part of\ + \ the URL that comes before .monday.com" + default: "" + order: 0 + auth_type: + type: "string" + const: "oauth2.0" + order: 1 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: "API Token for making authenticated requests." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "monday" + const: "monday" + enum: + - "monday" + order: 0 + type: "string" + source-monday-update: + title: "Monday Spec" + type: "object" + required: [] + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + subdomain: + type: "string" + title: "Subdomain/Slug" + description: + "Slug/subdomain of the account, or the first part of\ + \ the URL that comes before .monday.com" + default: "" + order: 0 + auth_type: + type: "string" + const: "oauth2.0" + order: 1 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: "API Token for making authenticated requests." 
+ airbyte_secret: true + source-algolia: + type: "object" + required: + - "api_key" + - "application_id" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + application_id: + type: "string" + description: "The application ID for your application found in settings" + order: 1 + title: "Application ID" + search_query: + type: "string" + description: + "Search query to be used with indexes_query stream with format\ + \ defined in `https://www.algolia.com/doc/rest-api/search/#tag/Search/operation/searchSingleIndex`" + order: 2 + title: "Indexes Search query" + default: "hitsPerPage=2&getRankingInfo=1" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + object_id: + type: "string" + description: "Object ID within index for search queries" + order: 4 + title: "Object ID" + default: "ecommerce-sample-data-9999996" + sourceType: + title: "algolia" + const: "algolia" + enum: + - "algolia" + order: 0 + type: "string" + source-algolia-update: + type: "object" + required: + - "api_key" + - "application_id" + - "start_date" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + application_id: + type: "string" + description: "The application ID for your application found in settings" + order: 1 + title: "Application ID" + search_query: + type: "string" + description: + "Search query to be used with indexes_query stream with format\ + \ defined in `https://www.algolia.com/doc/rest-api/search/#tag/Search/operation/searchSingleIndex`" + order: 2 + title: "Indexes Search query" + default: "hitsPerPage=2&getRankingInfo=1" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + object_id: + 
type: "string" + description: "Object ID within index for search queries" + order: 4 + title: "Object ID" + default: "ecommerce-sample-data-9999996" + source-amplitude: + title: "Amplitude Spec" + type: "object" + required: + - "api_key" + - "secret_key" + - "start_date" + - "sourceType" + properties: + data_region: + type: "string" + title: "Data region" + description: "Amplitude data region server" + enum: + - "Standard Server" + - "EU Residency Server" + default: "Standard Server" + api_key: + type: "string" + title: "API Key" + description: + "Amplitude API Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + secret_key: + type: "string" + title: "Secret Key" + description: + "Amplitude Secret Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + format: "date-time" + title: "Replication Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2021-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-01-25T00:00:00Z" + request_time_range: + type: "integer" + title: "Request time range" + description: + "According to Considerations too big time range in request can cause a timeout\ + \ error. In this case, set shorter time interval in hours." 
+ default: 24 + minimum: 1 + maximum: 8760 + sourceType: + title: "amplitude" + const: "amplitude" + enum: + - "amplitude" + order: 0 + type: "string" + source-amplitude-update: + title: "Amplitude Spec" + type: "object" + required: + - "api_key" + - "secret_key" + - "start_date" + properties: + data_region: + type: "string" + title: "Data region" + description: "Amplitude data region server" + enum: + - "Standard Server" + - "EU Residency Server" + default: "Standard Server" + api_key: + type: "string" + title: "API Key" + description: + "Amplitude API Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + secret_key: + type: "string" + title: "Secret Key" + description: + "Amplitude Secret Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + start_date: + type: "string" + format: "date-time" + title: "Replication Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2021-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-01-25T00:00:00Z" + request_time_range: + type: "integer" + title: "Request time range" + description: + "According to Considerations too big time range in request can cause a timeout\ + \ error. In this case, set shorter time interval in hours." + default: 24 + minimum: 1 + maximum: 8760 + source-google-pagespeed-insights: + type: "object" + required: + - "categories" + - "strategies" + - "urls" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited\ + \ when using without API Key. Creating and using the API key therefore\ + \ is recommended. The key is case sensitive." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + categories: + type: "array" + items: + type: "string" + enum: + - "accessibility" + - "best-practices" + - "performance" + - "pwa" + - "seo" + title: "Lighthouse Categories" + description: + "Defines which Lighthouse category to run. One or many of:\ + \ \"accessibility\", \"best-practices\", \"performance\", \"pwa\", \"\ + seo\"." + order: 1 + strategies: + type: "array" + items: + type: "string" + enum: + - "desktop" + - "mobile" + title: "Analyses Strategies" + description: + "The analyses strategy to use. Either \"desktop\" or \"mobile\"\ + ." + order: 2 + urls: + type: "array" + items: + type: "string" + pattern: + "^(?:origin:)?(http(s)?:\\/\\/)[\\w.-]+(?:\\.[\\w\\.-]+)+[\\\ + w\\-\\._~:\\/?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$" + title: "URLs to analyse" + description: + "The URLs to retrieve pagespeed information from. The connector\ + \ will attempt to sync PageSpeed reports for all the defined URLs. Format:\ + \ https://(www.)url.domain" + example: "https://example.com" + order: 3 + sourceType: + title: "google-pagespeed-insights" + const: "google-pagespeed-insights" + enum: + - "google-pagespeed-insights" + order: 0 + type: "string" + source-google-pagespeed-insights-update: + type: "object" + required: + - "categories" + - "strategies" + - "urls" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited\ + \ when using without API Key. Creating and using the API key therefore\ + \ is recommended. The key is case sensitive." + airbyte_secret: true + order: 0 + categories: + type: "array" + items: + type: "string" + enum: + - "accessibility" + - "best-practices" + - "performance" + - "pwa" + - "seo" + title: "Lighthouse Categories" + description: + "Defines which Lighthouse category to run. 
One or many of:\ + \ \"accessibility\", \"best-practices\", \"performance\", \"pwa\", \"\ + seo\"." + order: 1 + strategies: + type: "array" + items: + type: "string" + enum: + - "desktop" + - "mobile" + title: "Analyses Strategies" + description: + "The analyses strategy to use. Either \"desktop\" or \"mobile\"\ + ." + order: 2 + urls: + type: "array" + items: + type: "string" + pattern: + "^(?:origin:)?(http(s)?:\\/\\/)[\\w.-]+(?:\\.[\\w\\.-]+)+[\\\ + w\\-\\._~:\\/?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$" + title: "URLs to analyse" + description: + "The URLs to retrieve pagespeed information from. The connector\ + \ will attempt to sync PageSpeed reports for all the defined URLs. Format:\ + \ https://(www.)url.domain" + example: "https://example.com" + order: 3 + source-savvycal: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Go to SavvyCal → Settings → Developer → Personal Tokens and\ + \ make a new token. Then, copy the private key. https://savvycal.com/developers" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "savvycal" + const: "savvycal" + enum: + - "savvycal" + order: 0 + type: "string" + source-savvycal-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Go to SavvyCal → Settings → Developer → Personal Tokens and\ + \ make a new token. Then, copy the private key. 
https://savvycal.com/developers" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-shortcut: + type: "object" + required: + - "api_key_2" + - "start_date" + - "sourceType" + properties: + api_key_2: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + query: + type: "string" + description: "Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`" + title: "Query" + default: "title:Our first Epic" + order: 2 + sourceType: + title: "shortcut" + const: "shortcut" + enum: + - "shortcut" + order: 0 + type: "string" + source-shortcut-update: + type: "object" + required: + - "api_key_2" + - "start_date" + properties: + api_key_2: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + query: + type: "string" + description: "Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`" + title: "Query" + default: "title:Our first Epic" + order: 2 + source-pipedrive: + title: "Pipedrive Spec" + type: "object" + required: + - "replication_start_date" + - "api_token" + - "sourceType" + properties: + api_token: + title: "API Token" + type: "string" + description: "The Pipedrive API Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
When specified and not\ + \ None, then stream will behave as incremental" + examples: + - "2017-01-25 00:00:00Z" + type: "string" + sourceType: + title: "pipedrive" + const: "pipedrive" + enum: + - "pipedrive" + order: 0 + type: "string" + source-pipedrive-update: + title: "Pipedrive Spec" + type: "object" + required: + - "replication_start_date" + - "api_token" + properties: + api_token: + title: "API Token" + type: "string" + description: "The Pipedrive API Token." + airbyte_secret: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. When specified and not\ + \ None, then stream will behave as incremental" + examples: + - "2017-01-25 00:00:00Z" + type: "string" + source-amazon-ads: + title: "Amazon Ads Spec" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." 
+ enum: + - "NA" + - "EU" + - "FE" + type: "string" + default: "NA" + order: 4 + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-10-10" + - "2022-10-22" + order: 5 + type: "string" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + items: + type: "string" + enum: + - "enabled" + - "paused" + - "archived" + type: "array" + uniqueItems: true + order: 8 + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + examples: + - 3 + - 10 + type: "integer" + default: 3 + order: 9 + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. 
Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + items: + type: "string" + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "array" + uniqueItems: true + order: 10 + sourceType: + title: "amazon-ads" + const: "amazon-ads" + enum: + - "amazon-ads" + order: 0 + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + source-amazon-ads-update: + title: "Amazon Ads Spec" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." + enum: + - "NA" + - "EU" + - "FE" + type: "string" + default: "NA" + order: 4 + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-10-10" + - "2022-10-22" + order: 5 + type: "string" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. 
The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + items: + type: "string" + enum: + - "enabled" + - "paused" + - "archived" + type: "array" + uniqueItems: true + order: 8 + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + examples: + - 3 + - 10 + type: "integer" + default: 3 + order: 9 + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + items: + type: "string" + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "array" + uniqueItems: true + order: 10 + required: + - "client_id" + - "client_secret" + - "refresh_token" + source-sendinblue: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your API Key. See here." 
+ title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "sendinblue" + const: "sendinblue" + enum: + - "sendinblue" + order: 0 + type: "string" + source-sendinblue-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Your API Key. See here." + title: "API Key" + airbyte_secret: true + order: 0 + source-github: + title: "GitHub Source Spec" + type: "object" + required: + - "credentials" + - "repositories" + - "sourceType" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to GitHub" + type: "object" + order: 0 + group: "auth" + oneOf: + - type: "object" + title: "OAuth" + required: + - "access_token" + properties: + option_title: + type: "string" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + access_token: + type: "string" + title: "Access Token" + description: "OAuth access token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + const: "PAT Credentials" + order: 0 + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Tokens" + description: + "Log into GitHub and then generate a personal access token. 
To load balance your API quota consumption\ + \ across multiple API tokens, input multiple tokens separated with\ + \ \",\"" + airbyte_secret: true + x-speakeasy-param-sensitive: true + repositories: + type: "array" + items: + type: "string" + pattern: "^[\\w.-]+/(([\\w.-]*\\*)|[\\w.-]+(?docs for more info" + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + order: 2 + format: "date-time" + api_url: + type: "string" + examples: + - "https://github.com" + - "https://github.company.org" + title: "API URL" + default: "https://api.github.com/" + description: + "Please enter your basic URL from self-hosted GitHub instance\ + \ or leave it empty to use GitHub." + order: 3 + branches: + type: "array" + items: + type: "string" + title: "Branches" + examples: + - "airbytehq/airbyte/master" + - "airbytehq/airbyte/my-branch" + description: + "List of GitHub repository branches to pull commits for, e.g.\ + \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\ + \ the default branch will be pulled." + order: 4 + pattern_descriptor: "org/repo/branch1 org/repo/branch2" + max_waiting_time: + type: "integer" + title: "Max Waiting Time (in minutes)" + examples: + - 10 + - 30 + - 60 + default: 10 + minimum: 1 + maximum: 60 + description: + "Max Waiting Time for rate limit. 
Set higher value to wait\ + \ till rate limits will be resetted to continue sync" + order: 5 + sourceType: + title: "github" + const: "github" + enum: + - "github" + order: 0 + type: "string" + source-github-update: + title: "GitHub Source Spec" + type: "object" + required: + - "credentials" + - "repositories" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to GitHub" + type: "object" + order: 0 + group: "auth" + oneOf: + - type: "object" + title: "OAuth" + required: + - "access_token" + properties: + option_title: + type: "string" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + access_token: + type: "string" + title: "Access Token" + description: "OAuth access token" + airbyte_secret: true + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + - type: "object" + title: "Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + const: "PAT Credentials" + order: 0 + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Tokens" + description: + "Log into GitHub and then generate a personal access token. 
To load balance your API quota consumption\ + \ across multiple API tokens, input multiple tokens separated with\ + \ \",\"" + airbyte_secret: true + repositories: + type: "array" + items: + type: "string" + pattern: "^[\\w.-]+/(([\\w.-]*\\*)|[\\w.-]+(?docs for more info" + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + order: 2 + format: "date-time" + api_url: + type: "string" + examples: + - "https://github.com" + - "https://github.company.org" + title: "API URL" + default: "https://api.github.com/" + description: + "Please enter your basic URL from self-hosted GitHub instance\ + \ or leave it empty to use GitHub." + order: 3 + branches: + type: "array" + items: + type: "string" + title: "Branches" + examples: + - "airbytehq/airbyte/master" + - "airbytehq/airbyte/my-branch" + description: + "List of GitHub repository branches to pull commits for, e.g.\ + \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\ + \ the default branch will be pulled." + order: 4 + pattern_descriptor: "org/repo/branch1 org/repo/branch2" + max_waiting_time: + type: "integer" + title: "Max Waiting Time (in minutes)" + examples: + - 10 + - 30 + - 60 + default: 10 + minimum: 1 + maximum: 60 + description: + "Max Waiting Time for rate limit. 
Set higher value to wait\ + \ till rate limits will be resetted to continue sync" + order: 5 + source-guru: + type: "object" + required: + - "username" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + team_id: + type: "string" + description: + "Team ID received through response of /teams streams, make\ + \ sure about access to the team" + order: 3 + title: "team_id" + search_cards_query: + type: "string" + description: "Query for searching cards" + order: 4 + title: "search_cards_query" + sourceType: + title: "guru" + const: "guru" + enum: + - "guru" + order: 0 + type: "string" + source-guru-update: + type: "object" + required: + - "username" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + team_id: + type: "string" + description: + "Team ID received through response of /teams streams, make\ + \ sure about access to the team" + order: 3 + title: "team_id" + search_cards_query: + type: "string" + description: "Query for searching cards" + order: 4 + title: "search_cards_query" + source-bigquery: + title: "BigQuery Source Spec" + type: "object" + required: + - "project_id" + - "credentials_json" + - "sourceType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." 
+ title: "Project ID" + dataset_id: + type: "string" + description: + "The dataset ID to search for tables and views. If you are\ + \ only loading data from one dataset, setting this option could result\ + \ in much faster schema discovery." + title: "Default Dataset ID" + credentials_json: + type: "string" + description: + "The contents of your Service Account Key JSON file. See the\ + \ docs for more information on how to obtain this key." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "bigquery" + const: "bigquery" + enum: + - "bigquery" + order: 0 + type: "string" + source-bigquery-update: + title: "BigQuery Source Spec" + type: "object" + required: + - "project_id" + - "credentials_json" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + dataset_id: + type: "string" + description: + "The dataset ID to search for tables and views. If you are\ + \ only loading data from one dataset, setting this option could result\ + \ in much faster schema discovery." + title: "Default Dataset ID" + credentials_json: + type: "string" + description: + "The contents of your Service Account Key JSON file. See the\ + \ docs for more information on how to obtain this key." + title: "Credentials JSON" + airbyte_secret: true + source-vantage: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API Access token. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "vantage" + const: "vantage" + enum: + - "vantage" + order: 0 + type: "string" + source-vantage-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API Access token. See here." 
+ airbyte_secret: true + order: 0 + source-calendly: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Go to Integrations → API & Webhooks to obtain your bearer\ + \ token. https://calendly.com/integrations/api_webhooks" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "calendly" + const: "calendly" + enum: + - "calendly" + order: 0 + type: "string" + source-calendly-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "Go to Integrations → API & Webhooks to obtain your bearer\ + \ token. https://calendly.com/integrations/api_webhooks" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-picqer: + type: "object" + required: + - "username" + - "organization_name" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + title: "Username" + order: 0 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + organization_name: + type: "string" + description: "The organization name which is used to login to picqer" + title: "Organization Name" + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "picqer" + const: "picqer" + enum: + - "picqer" + order: 0 + type: "string" + source-picqer-update: + type: "object" + required: + - "username" + - "organization_name" + - "start_date" + properties: + 
username: + type: "string" + title: "Username" + order: 0 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 1 + organization_name: + type: "string" + description: "The organization name which is used to login to picqer" + title: "Organization Name" + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + source-firebolt: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + x-speakeasy-param-sensitive: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + sourceType: + title: "firebolt" + const: "firebolt" + enum: + - "firebolt" + order: 0 + type: "string" + source-firebolt-update: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." 
+ airbyte_secret: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + source-clazar: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "clazar" + const: "clazar" + enum: + - "clazar" + order: 0 + type: "string" + source-clazar-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + source-outreach: + title: "Source Outreach Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "redirect_uri" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Outreach developer application." + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Outreach developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining the new access token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + redirect_uri: + type: "string" + title: "Redirect URI" + description: + "A Redirect URI is the location where the authorization server\ + \ sends the user once the app has been successfully authorized and granted\ + \ an authorization code or access token." + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Outreach\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "outreach" + const: "outreach" + enum: + - "outreach" + order: 0 + type: "string" + source-outreach-update: + title: "Source Outreach Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "redirect_uri" + - "start_date" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Outreach developer application." + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Outreach developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining the new access token." + airbyte_secret: true + redirect_uri: + type: "string" + title: "Redirect URI" + description: + "A Redirect URI is the location where the authorization server\ + \ sends the user once the app has been successfully authorized and granted\ + \ an authorization code or access token." + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Outreach\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-pokeapi: + type: "object" + required: + - "pokemon_name" + - "sourceType" + properties: + pokemon_name: + type: "string" + description: "Pokemon requested from the API." + title: "Pokemon Name" + pattern: "^[a-z0-9_\\-]+$" + enum: + - "bulbasaur" + - "ivysaur" + - "venusaur" + - "charmander" + - "charmeleon" + - "charizard" + - "squirtle" + - "wartortle" + - "blastoise" + - "caterpie" + - "metapod" + - "butterfree" + - "weedle" + - "kakuna" + - "beedrill" + - "pidgey" + - "pidgeotto" + - "pidgeot" + - "rattata" + - "raticate" + - "spearow" + - "fearow" + - "ekans" + - "arbok" + - "pikachu" + - "raichu" + - "sandshrew" + - "sandslash" + - "nidoranf" + - "nidorina" + - "nidoqueen" + - "nidoranm" + - "nidorino" + - "nidoking" + - "clefairy" + - "clefable" + - "vulpix" + - "ninetales" + - "jigglypuff" + - "wigglytuff" + - "zubat" + - "golbat" + - "oddish" + - "gloom" + - "vileplume" + - "paras" + - "parasect" + - "venonat" + - "venomoth" + - "diglett" + - "dugtrio" + - "meowth" + - "persian" + - "psyduck" + - "golduck" + - "mankey" + - "primeape" + - "growlithe" + - "arcanine" + - "poliwag" + - "poliwhirl" + - "poliwrath" + - "abra" + - "kadabra" + - "alakazam" + - "machop" + - "machoke" + - "machamp" + - "bellsprout" + - "weepinbell" + - "victreebel" + - "tentacool" + - "tentacruel" + - "geodude" + - "graveler" + - "golem" + - "ponyta" + - "rapidash" + - "slowpoke" + - "slowbro" + - "magnemite" + - "magneton" + - "farfetchd" + - "doduo" + - "dodrio" + - "seel" + - "dewgong" + - "grimer" + - "muk" + - "shellder" + - "cloyster" + - "gastly" + - "haunter" + - "gengar" + - "onix" + - "drowzee" + - "hypno" + - "krabby" + - "kingler" + - "voltorb" + - "electrode" + - "exeggcute" + - "exeggutor" + - "cubone" + - "marowak" + - "hitmonlee" + - "hitmonchan" + - "lickitung" + - "koffing" + - "weezing" + - "rhyhorn" + - "rhydon" + - "chansey" + - "tangela" + - 
"kangaskhan" + - "horsea" + - "seadra" + - "goldeen" + - "seaking" + - "staryu" + - "starmie" + - "mrmime" + - "scyther" + - "jynx" + - "electabuzz" + - "magmar" + - "pinsir" + - "tauros" + - "magikarp" + - "gyarados" + - "lapras" + - "ditto" + - "eevee" + - "vaporeon" + - "jolteon" + - "flareon" + - "porygon" + - "omanyte" + - "omastar" + - "kabuto" + - "kabutops" + - "aerodactyl" + - "snorlax" + - "articuno" + - "zapdos" + - "moltres" + - "dratini" + - "dragonair" + - "dragonite" + - "mewtwo" + - "mew" + - "chikorita" + - "bayleef" + - "meganium" + - "cyndaquil" + - "quilava" + - "typhlosion" + - "totodile" + - "croconaw" + - "feraligatr" + - "sentret" + - "furret" + - "hoothoot" + - "noctowl" + - "ledyba" + - "ledian" + - "spinarak" + - "ariados" + - "crobat" + - "chinchou" + - "lanturn" + - "pichu" + - "cleffa" + - "igglybuff" + - "togepi" + - "togetic" + - "natu" + - "xatu" + - "mareep" + - "flaaffy" + - "ampharos" + - "bellossom" + - "marill" + - "azumarill" + - "sudowoodo" + - "politoed" + - "hoppip" + - "skiploom" + - "jumpluff" + - "aipom" + - "sunkern" + - "sunflora" + - "yanma" + - "wooper" + - "quagsire" + - "espeon" + - "umbreon" + - "murkrow" + - "slowking" + - "misdreavus" + - "unown" + - "wobbuffet" + - "girafarig" + - "pineco" + - "forretress" + - "dunsparce" + - "gligar" + - "steelix" + - "snubbull" + - "granbull" + - "qwilfish" + - "scizor" + - "shuckle" + - "heracross" + - "sneasel" + - "teddiursa" + - "ursaring" + - "slugma" + - "magcargo" + - "swinub" + - "piloswine" + - "corsola" + - "remoraid" + - "octillery" + - "delibird" + - "mantine" + - "skarmory" + - "houndour" + - "houndoom" + - "kingdra" + - "phanpy" + - "donphan" + - "porygon2" + - "stantler" + - "smeargle" + - "tyrogue" + - "hitmontop" + - "smoochum" + - "elekid" + - "magby" + - "miltank" + - "blissey" + - "raikou" + - "entei" + - "suicune" + - "larvitar" + - "pupitar" + - "tyranitar" + - "lugia" + - "ho-oh" + - "celebi" + - "treecko" + - "grovyle" + - "sceptile" + - "torchic" + - 
"combusken" + - "blaziken" + - "mudkip" + - "marshtomp" + - "swampert" + - "poochyena" + - "mightyena" + - "zigzagoon" + - "linoone" + - "wurmple" + - "silcoon" + - "beautifly" + - "cascoon" + - "dustox" + - "lotad" + - "lombre" + - "ludicolo" + - "seedot" + - "nuzleaf" + - "shiftry" + - "taillow" + - "swellow" + - "wingull" + - "pelipper" + - "ralts" + - "kirlia" + - "gardevoir" + - "surskit" + - "masquerain" + - "shroomish" + - "breloom" + - "slakoth" + - "vigoroth" + - "slaking" + - "nincada" + - "ninjask" + - "shedinja" + - "whismur" + - "loudred" + - "exploud" + - "makuhita" + - "hariyama" + - "azurill" + - "nosepass" + - "skitty" + - "delcatty" + - "sableye" + - "mawile" + - "aron" + - "lairon" + - "aggron" + - "meditite" + - "medicham" + - "electrike" + - "manectric" + - "plusle" + - "minun" + - "volbeat" + - "illumise" + - "roselia" + - "gulpin" + - "swalot" + - "carvanha" + - "sharpedo" + - "wailmer" + - "wailord" + - "numel" + - "camerupt" + - "torkoal" + - "spoink" + - "grumpig" + - "spinda" + - "trapinch" + - "vibrava" + - "flygon" + - "cacnea" + - "cacturne" + - "swablu" + - "altaria" + - "zangoose" + - "seviper" + - "lunatone" + - "solrock" + - "barboach" + - "whiscash" + - "corphish" + - "crawdaunt" + - "baltoy" + - "claydol" + - "lileep" + - "cradily" + - "anorith" + - "armaldo" + - "feebas" + - "milotic" + - "castform" + - "kecleon" + - "shuppet" + - "banette" + - "duskull" + - "dusclops" + - "tropius" + - "chimecho" + - "absol" + - "wynaut" + - "snorunt" + - "glalie" + - "spheal" + - "sealeo" + - "walrein" + - "clamperl" + - "huntail" + - "gorebyss" + - "relicanth" + - "luvdisc" + - "bagon" + - "shelgon" + - "salamence" + - "beldum" + - "metang" + - "metagross" + - "regirock" + - "regice" + - "registeel" + - "latias" + - "latios" + - "kyogre" + - "groudon" + - "rayquaza" + - "jirachi" + - "deoxys" + - "turtwig" + - "grotle" + - "torterra" + - "chimchar" + - "monferno" + - "infernape" + - "piplup" + - "prinplup" + - "empoleon" + - "starly" + - 
"staravia" + - "staraptor" + - "bidoof" + - "bibarel" + - "kricketot" + - "kricketune" + - "shinx" + - "luxio" + - "luxray" + - "budew" + - "roserade" + - "cranidos" + - "rampardos" + - "shieldon" + - "bastiodon" + - "burmy" + - "wormadam" + - "mothim" + - "combee" + - "vespiquen" + - "pachirisu" + - "buizel" + - "floatzel" + - "cherubi" + - "cherrim" + - "shellos" + - "gastrodon" + - "ambipom" + - "drifloon" + - "drifblim" + - "buneary" + - "lopunny" + - "mismagius" + - "honchkrow" + - "glameow" + - "purugly" + - "chingling" + - "stunky" + - "skuntank" + - "bronzor" + - "bronzong" + - "bonsly" + - "mimejr" + - "happiny" + - "chatot" + - "spiritomb" + - "gible" + - "gabite" + - "garchomp" + - "munchlax" + - "riolu" + - "lucario" + - "hippopotas" + - "hippowdon" + - "skorupi" + - "drapion" + - "croagunk" + - "toxicroak" + - "carnivine" + - "finneon" + - "lumineon" + - "mantyke" + - "snover" + - "abomasnow" + - "weavile" + - "magnezone" + - "lickilicky" + - "rhyperior" + - "tangrowth" + - "electivire" + - "magmortar" + - "togekiss" + - "yanmega" + - "leafeon" + - "glaceon" + - "gliscor" + - "mamoswine" + - "porygon-z" + - "gallade" + - "probopass" + - "dusknoir" + - "froslass" + - "rotom" + - "uxie" + - "mesprit" + - "azelf" + - "dialga" + - "palkia" + - "heatran" + - "regigigas" + - "giratina" + - "cresselia" + - "phione" + - "manaphy" + - "darkrai" + - "shaymin" + - "arceus" + - "victini" + - "snivy" + - "servine" + - "serperior" + - "tepig" + - "pignite" + - "emboar" + - "oshawott" + - "dewott" + - "samurott" + - "patrat" + - "watchog" + - "lillipup" + - "herdier" + - "stoutland" + - "purrloin" + - "liepard" + - "pansage" + - "simisage" + - "pansear" + - "simisear" + - "panpour" + - "simipour" + - "munna" + - "musharna" + - "pidove" + - "tranquill" + - "unfezant" + - "blitzle" + - "zebstrika" + - "roggenrola" + - "boldore" + - "gigalith" + - "woobat" + - "swoobat" + - "drilbur" + - "excadrill" + - "audino" + - "timburr" + - "gurdurr" + - "conkeldurr" + - "tympole" 
+ - "palpitoad" + - "seismitoad" + - "throh" + - "sawk" + - "sewaddle" + - "swadloon" + - "leavanny" + - "venipede" + - "whirlipede" + - "scolipede" + - "cottonee" + - "whimsicott" + - "petilil" + - "lilligant" + - "basculin" + - "sandile" + - "krokorok" + - "krookodile" + - "darumaka" + - "darmanitan" + - "maractus" + - "dwebble" + - "crustle" + - "scraggy" + - "scrafty" + - "sigilyph" + - "yamask" + - "cofagrigus" + - "tirtouga" + - "carracosta" + - "archen" + - "archeops" + - "trubbish" + - "garbodor" + - "zorua" + - "zoroark" + - "minccino" + - "cinccino" + - "gothita" + - "gothorita" + - "gothitelle" + - "solosis" + - "duosion" + - "reuniclus" + - "ducklett" + - "swanna" + - "vanillite" + - "vanillish" + - "vanilluxe" + - "deerling" + - "sawsbuck" + - "emolga" + - "karrablast" + - "escavalier" + - "foongus" + - "amoonguss" + - "frillish" + - "jellicent" + - "alomomola" + - "joltik" + - "galvantula" + - "ferroseed" + - "ferrothorn" + - "klink" + - "klang" + - "klinklang" + - "tynamo" + - "eelektrik" + - "eelektross" + - "elgyem" + - "beheeyem" + - "litwick" + - "lampent" + - "chandelure" + - "axew" + - "fraxure" + - "haxorus" + - "cubchoo" + - "beartic" + - "cryogonal" + - "shelmet" + - "accelgor" + - "stunfisk" + - "mienfoo" + - "mienshao" + - "druddigon" + - "golett" + - "golurk" + - "pawniard" + - "bisharp" + - "bouffalant" + - "rufflet" + - "braviary" + - "vullaby" + - "mandibuzz" + - "heatmor" + - "durant" + - "deino" + - "zweilous" + - "hydreigon" + - "larvesta" + - "volcarona" + - "cobalion" + - "terrakion" + - "virizion" + - "tornadus" + - "thundurus" + - "reshiram" + - "zekrom" + - "landorus" + - "kyurem" + - "keldeo" + - "meloetta" + - "genesect" + - "chespin" + - "quilladin" + - "chesnaught" + - "fennekin" + - "braixen" + - "delphox" + - "froakie" + - "frogadier" + - "greninja" + - "bunnelby" + - "diggersby" + - "fletchling" + - "fletchinder" + - "talonflame" + - "scatterbug" + - "spewpa" + - "vivillon" + - "litleo" + - "pyroar" + - "flabebe" + - 
"floette" + - "florges" + - "skiddo" + - "gogoat" + - "pancham" + - "pangoro" + - "furfrou" + - "espurr" + - "meowstic" + - "honedge" + - "doublade" + - "aegislash" + - "spritzee" + - "aromatisse" + - "swirlix" + - "slurpuff" + - "inkay" + - "malamar" + - "binacle" + - "barbaracle" + - "skrelp" + - "dragalge" + - "clauncher" + - "clawitzer" + - "helioptile" + - "heliolisk" + - "tyrunt" + - "tyrantrum" + - "amaura" + - "aurorus" + - "sylveon" + - "hawlucha" + - "dedenne" + - "carbink" + - "goomy" + - "sliggoo" + - "goodra" + - "klefki" + - "phantump" + - "trevenant" + - "pumpkaboo" + - "gourgeist" + - "bergmite" + - "avalugg" + - "noibat" + - "noivern" + - "xerneas" + - "yveltal" + - "zygarde" + - "diancie" + - "hoopa" + - "volcanion" + - "rowlet" + - "dartrix" + - "decidueye" + - "litten" + - "torracat" + - "incineroar" + - "popplio" + - "brionne" + - "primarina" + - "pikipek" + - "trumbeak" + - "toucannon" + - "yungoos" + - "gumshoos" + - "grubbin" + - "charjabug" + - "vikavolt" + - "crabrawler" + - "crabominable" + - "oricorio" + - "cutiefly" + - "ribombee" + - "rockruff" + - "lycanroc" + - "wishiwashi" + - "mareanie" + - "toxapex" + - "mudbray" + - "mudsdale" + - "dewpider" + - "araquanid" + - "fomantis" + - "lurantis" + - "morelull" + - "shiinotic" + - "salandit" + - "salazzle" + - "stufful" + - "bewear" + - "bounsweet" + - "steenee" + - "tsareena" + - "comfey" + - "oranguru" + - "passimian" + - "wimpod" + - "golisopod" + - "sandygast" + - "palossand" + - "pyukumuku" + - "typenull" + - "silvally" + - "minior" + - "komala" + - "turtonator" + - "togedemaru" + - "mimikyu" + - "bruxish" + - "drampa" + - "dhelmise" + - "jangmo-o" + - "hakamo-o" + - "kommo-o" + - "tapukoko" + - "tapulele" + - "tapubulu" + - "tapufini" + - "cosmog" + - "cosmoem" + - "solgaleo" + - "lunala" + - "nihilego" + - "buzzwole" + - "pheromosa" + - "xurkitree" + - "celesteela" + - "kartana" + - "guzzlord" + - "necrozma" + - "magearna" + - "marshadow" + - "poipole" + - "naganadel" + - 
"stakataka" + - "blacephalon" + - "zeraora" + - "meltan" + - "melmetal" + - "grookey" + - "thwackey" + - "rillaboom" + - "scorbunny" + - "raboot" + - "cinderace" + - "sobble" + - "drizzile" + - "inteleon" + - "skwovet" + - "greedent" + - "rookidee" + - "corvisquire" + - "corviknight" + - "blipbug" + - "dottler" + - "orbeetle" + - "nickit" + - "thievul" + - "gossifleur" + - "eldegoss" + - "wooloo" + - "dubwool" + - "chewtle" + - "drednaw" + - "yamper" + - "boltund" + - "rolycoly" + - "carkol" + - "coalossal" + - "applin" + - "flapple" + - "appletun" + - "silicobra" + - "sandaconda" + - "cramorant" + - "arrokuda" + - "barraskewda" + - "toxel" + - "toxtricity" + - "sizzlipede" + - "centiskorch" + - "clobbopus" + - "grapploct" + - "sinistea" + - "polteageist" + - "hatenna" + - "hattrem" + - "hatterene" + - "impidimp" + - "morgrem" + - "grimmsnarl" + - "obstagoon" + - "perrserker" + - "cursola" + - "sirfetchd" + - "mrrime" + - "runerigus" + - "milcery" + - "alcremie" + - "falinks" + - "pincurchin" + - "snom" + - "frosmoth" + - "stonjourner" + - "eiscue" + - "indeedee" + - "morpeko" + - "cufant" + - "copperajah" + - "dracozolt" + - "arctozolt" + - "dracovish" + - "arctovish" + - "duraludon" + - "dreepy" + - "drakloak" + - "dragapult" + - "zacian" + - "zamazenta" + - "eternatus" + - "kubfu" + - "urshifu" + - "zarude" + - "regieleki" + - "regidrago" + - "glastrier" + - "spectrier" + - "calyrex" + examples: + - "ditto" + - "luxray" + - "snorlax" + order: 0 + sourceType: + title: "pokeapi" + const: "pokeapi" + enum: + - "pokeapi" + order: 0 + type: "string" + source-pokeapi-update: + type: "object" + required: + - "pokemon_name" + properties: + pokemon_name: + type: "string" + description: "Pokemon requested from the API." 
+ title: "Pokemon Name" + pattern: "^[a-z0-9_\\-]+$" + enum: + - "bulbasaur" + - "ivysaur" + - "venusaur" + - "charmander" + - "charmeleon" + - "charizard" + - "squirtle" + - "wartortle" + - "blastoise" + - "caterpie" + - "metapod" + - "butterfree" + - "weedle" + - "kakuna" + - "beedrill" + - "pidgey" + - "pidgeotto" + - "pidgeot" + - "rattata" + - "raticate" + - "spearow" + - "fearow" + - "ekans" + - "arbok" + - "pikachu" + - "raichu" + - "sandshrew" + - "sandslash" + - "nidoranf" + - "nidorina" + - "nidoqueen" + - "nidoranm" + - "nidorino" + - "nidoking" + - "clefairy" + - "clefable" + - "vulpix" + - "ninetales" + - "jigglypuff" + - "wigglytuff" + - "zubat" + - "golbat" + - "oddish" + - "gloom" + - "vileplume" + - "paras" + - "parasect" + - "venonat" + - "venomoth" + - "diglett" + - "dugtrio" + - "meowth" + - "persian" + - "psyduck" + - "golduck" + - "mankey" + - "primeape" + - "growlithe" + - "arcanine" + - "poliwag" + - "poliwhirl" + - "poliwrath" + - "abra" + - "kadabra" + - "alakazam" + - "machop" + - "machoke" + - "machamp" + - "bellsprout" + - "weepinbell" + - "victreebel" + - "tentacool" + - "tentacruel" + - "geodude" + - "graveler" + - "golem" + - "ponyta" + - "rapidash" + - "slowpoke" + - "slowbro" + - "magnemite" + - "magneton" + - "farfetchd" + - "doduo" + - "dodrio" + - "seel" + - "dewgong" + - "grimer" + - "muk" + - "shellder" + - "cloyster" + - "gastly" + - "haunter" + - "gengar" + - "onix" + - "drowzee" + - "hypno" + - "krabby" + - "kingler" + - "voltorb" + - "electrode" + - "exeggcute" + - "exeggutor" + - "cubone" + - "marowak" + - "hitmonlee" + - "hitmonchan" + - "lickitung" + - "koffing" + - "weezing" + - "rhyhorn" + - "rhydon" + - "chansey" + - "tangela" + - "kangaskhan" + - "horsea" + - "seadra" + - "goldeen" + - "seaking" + - "staryu" + - "starmie" + - "mrmime" + - "scyther" + - "jynx" + - "electabuzz" + - "magmar" + - "pinsir" + - "tauros" + - "magikarp" + - "gyarados" + - "lapras" + - "ditto" + - "eevee" + - "vaporeon" + - "jolteon" + - 
"flareon" + - "porygon" + - "omanyte" + - "omastar" + - "kabuto" + - "kabutops" + - "aerodactyl" + - "snorlax" + - "articuno" + - "zapdos" + - "moltres" + - "dratini" + - "dragonair" + - "dragonite" + - "mewtwo" + - "mew" + - "chikorita" + - "bayleef" + - "meganium" + - "cyndaquil" + - "quilava" + - "typhlosion" + - "totodile" + - "croconaw" + - "feraligatr" + - "sentret" + - "furret" + - "hoothoot" + - "noctowl" + - "ledyba" + - "ledian" + - "spinarak" + - "ariados" + - "crobat" + - "chinchou" + - "lanturn" + - "pichu" + - "cleffa" + - "igglybuff" + - "togepi" + - "togetic" + - "natu" + - "xatu" + - "mareep" + - "flaaffy" + - "ampharos" + - "bellossom" + - "marill" + - "azumarill" + - "sudowoodo" + - "politoed" + - "hoppip" + - "skiploom" + - "jumpluff" + - "aipom" + - "sunkern" + - "sunflora" + - "yanma" + - "wooper" + - "quagsire" + - "espeon" + - "umbreon" + - "murkrow" + - "slowking" + - "misdreavus" + - "unown" + - "wobbuffet" + - "girafarig" + - "pineco" + - "forretress" + - "dunsparce" + - "gligar" + - "steelix" + - "snubbull" + - "granbull" + - "qwilfish" + - "scizor" + - "shuckle" + - "heracross" + - "sneasel" + - "teddiursa" + - "ursaring" + - "slugma" + - "magcargo" + - "swinub" + - "piloswine" + - "corsola" + - "remoraid" + - "octillery" + - "delibird" + - "mantine" + - "skarmory" + - "houndour" + - "houndoom" + - "kingdra" + - "phanpy" + - "donphan" + - "porygon2" + - "stantler" + - "smeargle" + - "tyrogue" + - "hitmontop" + - "smoochum" + - "elekid" + - "magby" + - "miltank" + - "blissey" + - "raikou" + - "entei" + - "suicune" + - "larvitar" + - "pupitar" + - "tyranitar" + - "lugia" + - "ho-oh" + - "celebi" + - "treecko" + - "grovyle" + - "sceptile" + - "torchic" + - "combusken" + - "blaziken" + - "mudkip" + - "marshtomp" + - "swampert" + - "poochyena" + - "mightyena" + - "zigzagoon" + - "linoone" + - "wurmple" + - "silcoon" + - "beautifly" + - "cascoon" + - "dustox" + - "lotad" + - "lombre" + - "ludicolo" + - "seedot" + - "nuzleaf" + - "shiftry" + - 
"taillow" + - "swellow" + - "wingull" + - "pelipper" + - "ralts" + - "kirlia" + - "gardevoir" + - "surskit" + - "masquerain" + - "shroomish" + - "breloom" + - "slakoth" + - "vigoroth" + - "slaking" + - "nincada" + - "ninjask" + - "shedinja" + - "whismur" + - "loudred" + - "exploud" + - "makuhita" + - "hariyama" + - "azurill" + - "nosepass" + - "skitty" + - "delcatty" + - "sableye" + - "mawile" + - "aron" + - "lairon" + - "aggron" + - "meditite" + - "medicham" + - "electrike" + - "manectric" + - "plusle" + - "minun" + - "volbeat" + - "illumise" + - "roselia" + - "gulpin" + - "swalot" + - "carvanha" + - "sharpedo" + - "wailmer" + - "wailord" + - "numel" + - "camerupt" + - "torkoal" + - "spoink" + - "grumpig" + - "spinda" + - "trapinch" + - "vibrava" + - "flygon" + - "cacnea" + - "cacturne" + - "swablu" + - "altaria" + - "zangoose" + - "seviper" + - "lunatone" + - "solrock" + - "barboach" + - "whiscash" + - "corphish" + - "crawdaunt" + - "baltoy" + - "claydol" + - "lileep" + - "cradily" + - "anorith" + - "armaldo" + - "feebas" + - "milotic" + - "castform" + - "kecleon" + - "shuppet" + - "banette" + - "duskull" + - "dusclops" + - "tropius" + - "chimecho" + - "absol" + - "wynaut" + - "snorunt" + - "glalie" + - "spheal" + - "sealeo" + - "walrein" + - "clamperl" + - "huntail" + - "gorebyss" + - "relicanth" + - "luvdisc" + - "bagon" + - "shelgon" + - "salamence" + - "beldum" + - "metang" + - "metagross" + - "regirock" + - "regice" + - "registeel" + - "latias" + - "latios" + - "kyogre" + - "groudon" + - "rayquaza" + - "jirachi" + - "deoxys" + - "turtwig" + - "grotle" + - "torterra" + - "chimchar" + - "monferno" + - "infernape" + - "piplup" + - "prinplup" + - "empoleon" + - "starly" + - "staravia" + - "staraptor" + - "bidoof" + - "bibarel" + - "kricketot" + - "kricketune" + - "shinx" + - "luxio" + - "luxray" + - "budew" + - "roserade" + - "cranidos" + - "rampardos" + - "shieldon" + - "bastiodon" + - "burmy" + - "wormadam" + - "mothim" + - "combee" + - "vespiquen" + - 
"pachirisu" + - "buizel" + - "floatzel" + - "cherubi" + - "cherrim" + - "shellos" + - "gastrodon" + - "ambipom" + - "drifloon" + - "drifblim" + - "buneary" + - "lopunny" + - "mismagius" + - "honchkrow" + - "glameow" + - "purugly" + - "chingling" + - "stunky" + - "skuntank" + - "bronzor" + - "bronzong" + - "bonsly" + - "mimejr" + - "happiny" + - "chatot" + - "spiritomb" + - "gible" + - "gabite" + - "garchomp" + - "munchlax" + - "riolu" + - "lucario" + - "hippopotas" + - "hippowdon" + - "skorupi" + - "drapion" + - "croagunk" + - "toxicroak" + - "carnivine" + - "finneon" + - "lumineon" + - "mantyke" + - "snover" + - "abomasnow" + - "weavile" + - "magnezone" + - "lickilicky" + - "rhyperior" + - "tangrowth" + - "electivire" + - "magmortar" + - "togekiss" + - "yanmega" + - "leafeon" + - "glaceon" + - "gliscor" + - "mamoswine" + - "porygon-z" + - "gallade" + - "probopass" + - "dusknoir" + - "froslass" + - "rotom" + - "uxie" + - "mesprit" + - "azelf" + - "dialga" + - "palkia" + - "heatran" + - "regigigas" + - "giratina" + - "cresselia" + - "phione" + - "manaphy" + - "darkrai" + - "shaymin" + - "arceus" + - "victini" + - "snivy" + - "servine" + - "serperior" + - "tepig" + - "pignite" + - "emboar" + - "oshawott" + - "dewott" + - "samurott" + - "patrat" + - "watchog" + - "lillipup" + - "herdier" + - "stoutland" + - "purrloin" + - "liepard" + - "pansage" + - "simisage" + - "pansear" + - "simisear" + - "panpour" + - "simipour" + - "munna" + - "musharna" + - "pidove" + - "tranquill" + - "unfezant" + - "blitzle" + - "zebstrika" + - "roggenrola" + - "boldore" + - "gigalith" + - "woobat" + - "swoobat" + - "drilbur" + - "excadrill" + - "audino" + - "timburr" + - "gurdurr" + - "conkeldurr" + - "tympole" + - "palpitoad" + - "seismitoad" + - "throh" + - "sawk" + - "sewaddle" + - "swadloon" + - "leavanny" + - "venipede" + - "whirlipede" + - "scolipede" + - "cottonee" + - "whimsicott" + - "petilil" + - "lilligant" + - "basculin" + - "sandile" + - "krokorok" + - "krookodile" + - 
"darumaka" + - "darmanitan" + - "maractus" + - "dwebble" + - "crustle" + - "scraggy" + - "scrafty" + - "sigilyph" + - "yamask" + - "cofagrigus" + - "tirtouga" + - "carracosta" + - "archen" + - "archeops" + - "trubbish" + - "garbodor" + - "zorua" + - "zoroark" + - "minccino" + - "cinccino" + - "gothita" + - "gothorita" + - "gothitelle" + - "solosis" + - "duosion" + - "reuniclus" + - "ducklett" + - "swanna" + - "vanillite" + - "vanillish" + - "vanilluxe" + - "deerling" + - "sawsbuck" + - "emolga" + - "karrablast" + - "escavalier" + - "foongus" + - "amoonguss" + - "frillish" + - "jellicent" + - "alomomola" + - "joltik" + - "galvantula" + - "ferroseed" + - "ferrothorn" + - "klink" + - "klang" + - "klinklang" + - "tynamo" + - "eelektrik" + - "eelektross" + - "elgyem" + - "beheeyem" + - "litwick" + - "lampent" + - "chandelure" + - "axew" + - "fraxure" + - "haxorus" + - "cubchoo" + - "beartic" + - "cryogonal" + - "shelmet" + - "accelgor" + - "stunfisk" + - "mienfoo" + - "mienshao" + - "druddigon" + - "golett" + - "golurk" + - "pawniard" + - "bisharp" + - "bouffalant" + - "rufflet" + - "braviary" + - "vullaby" + - "mandibuzz" + - "heatmor" + - "durant" + - "deino" + - "zweilous" + - "hydreigon" + - "larvesta" + - "volcarona" + - "cobalion" + - "terrakion" + - "virizion" + - "tornadus" + - "thundurus" + - "reshiram" + - "zekrom" + - "landorus" + - "kyurem" + - "keldeo" + - "meloetta" + - "genesect" + - "chespin" + - "quilladin" + - "chesnaught" + - "fennekin" + - "braixen" + - "delphox" + - "froakie" + - "frogadier" + - "greninja" + - "bunnelby" + - "diggersby" + - "fletchling" + - "fletchinder" + - "talonflame" + - "scatterbug" + - "spewpa" + - "vivillon" + - "litleo" + - "pyroar" + - "flabebe" + - "floette" + - "florges" + - "skiddo" + - "gogoat" + - "pancham" + - "pangoro" + - "furfrou" + - "espurr" + - "meowstic" + - "honedge" + - "doublade" + - "aegislash" + - "spritzee" + - "aromatisse" + - "swirlix" + - "slurpuff" + - "inkay" + - "malamar" + - "binacle" + - 
"barbaracle" + - "skrelp" + - "dragalge" + - "clauncher" + - "clawitzer" + - "helioptile" + - "heliolisk" + - "tyrunt" + - "tyrantrum" + - "amaura" + - "aurorus" + - "sylveon" + - "hawlucha" + - "dedenne" + - "carbink" + - "goomy" + - "sliggoo" + - "goodra" + - "klefki" + - "phantump" + - "trevenant" + - "pumpkaboo" + - "gourgeist" + - "bergmite" + - "avalugg" + - "noibat" + - "noivern" + - "xerneas" + - "yveltal" + - "zygarde" + - "diancie" + - "hoopa" + - "volcanion" + - "rowlet" + - "dartrix" + - "decidueye" + - "litten" + - "torracat" + - "incineroar" + - "popplio" + - "brionne" + - "primarina" + - "pikipek" + - "trumbeak" + - "toucannon" + - "yungoos" + - "gumshoos" + - "grubbin" + - "charjabug" + - "vikavolt" + - "crabrawler" + - "crabominable" + - "oricorio" + - "cutiefly" + - "ribombee" + - "rockruff" + - "lycanroc" + - "wishiwashi" + - "mareanie" + - "toxapex" + - "mudbray" + - "mudsdale" + - "dewpider" + - "araquanid" + - "fomantis" + - "lurantis" + - "morelull" + - "shiinotic" + - "salandit" + - "salazzle" + - "stufful" + - "bewear" + - "bounsweet" + - "steenee" + - "tsareena" + - "comfey" + - "oranguru" + - "passimian" + - "wimpod" + - "golisopod" + - "sandygast" + - "palossand" + - "pyukumuku" + - "typenull" + - "silvally" + - "minior" + - "komala" + - "turtonator" + - "togedemaru" + - "mimikyu" + - "bruxish" + - "drampa" + - "dhelmise" + - "jangmo-o" + - "hakamo-o" + - "kommo-o" + - "tapukoko" + - "tapulele" + - "tapubulu" + - "tapufini" + - "cosmog" + - "cosmoem" + - "solgaleo" + - "lunala" + - "nihilego" + - "buzzwole" + - "pheromosa" + - "xurkitree" + - "celesteela" + - "kartana" + - "guzzlord" + - "necrozma" + - "magearna" + - "marshadow" + - "poipole" + - "naganadel" + - "stakataka" + - "blacephalon" + - "zeraora" + - "meltan" + - "melmetal" + - "grookey" + - "thwackey" + - "rillaboom" + - "scorbunny" + - "raboot" + - "cinderace" + - "sobble" + - "drizzile" + - "inteleon" + - "skwovet" + - "greedent" + - "rookidee" + - "corvisquire" + - 
"corviknight" + - "blipbug" + - "dottler" + - "orbeetle" + - "nickit" + - "thievul" + - "gossifleur" + - "eldegoss" + - "wooloo" + - "dubwool" + - "chewtle" + - "drednaw" + - "yamper" + - "boltund" + - "rolycoly" + - "carkol" + - "coalossal" + - "applin" + - "flapple" + - "appletun" + - "silicobra" + - "sandaconda" + - "cramorant" + - "arrokuda" + - "barraskewda" + - "toxel" + - "toxtricity" + - "sizzlipede" + - "centiskorch" + - "clobbopus" + - "grapploct" + - "sinistea" + - "polteageist" + - "hatenna" + - "hattrem" + - "hatterene" + - "impidimp" + - "morgrem" + - "grimmsnarl" + - "obstagoon" + - "perrserker" + - "cursola" + - "sirfetchd" + - "mrrime" + - "runerigus" + - "milcery" + - "alcremie" + - "falinks" + - "pincurchin" + - "snom" + - "frosmoth" + - "stonjourner" + - "eiscue" + - "indeedee" + - "morpeko" + - "cufant" + - "copperajah" + - "dracozolt" + - "arctozolt" + - "dracovish" + - "arctovish" + - "duraludon" + - "dreepy" + - "drakloak" + - "dragapult" + - "zacian" + - "zamazenta" + - "eternatus" + - "kubfu" + - "urshifu" + - "zarude" + - "regieleki" + - "regidrago" + - "glastrier" + - "spectrier" + - "calyrex" + examples: + - "ditto" + - "luxray" + - "snorlax" + order: 0 + source-senseforce: + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API access token. See here. The toke is case sensitive." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: + "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. 
(Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their ULR)" + order: 1 + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: + "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + order: 2 + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: + "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + format: "date" + order: 4 + sourceType: + title: "senseforce" + const: "senseforce" + enum: + - "senseforce" + order: 0 + type: "string" + source-senseforce-update: + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API access token. See here. The toke is case sensitive." + airbyte_secret: true + order: 0 + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: + "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. 
(Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their ULR)" + order: 1 + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: + "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + order: 2 + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: + "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + format: "date" + order: 4 + source-freshsales: + type: "object" + required: + - "domain_name" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshsales API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain_name: + type: "string" + order: 0 + title: "Domain Name" + examples: + - "mydomain.myfreshworks.com" + description: "The Name of your Freshsales domain" + sourceType: + title: "freshsales" + const: "freshsales" + enum: + - "freshsales" + order: 0 + type: "string" + source-freshsales-update: + type: "object" + required: + - "domain_name" + - "api_key" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshsales API Key. See here. The key is case sensitive." 
+ airbyte_secret: true + domain_name: + type: "string" + order: 0 + title: "Domain Name" + examples: + - "mydomain.myfreshworks.com" + description: "The Name of your Freshsales domain" + source-hubplanner: + title: "Hubplanner Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Hubplanner API key. See https://github.com/hubplanner/API#authentication\ + \ for more details." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "hubplanner" + const: "hubplanner" + enum: + - "hubplanner" + order: 0 + type: "string" + source-hubplanner-update: + title: "Hubplanner Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Hubplanner API key. See https://github.com/hubplanner/API#authentication\ + \ for more details." + airbyte_secret: true + source-square: + title: "Square Spec" + type: "object" + required: + - "is_sandbox" + - "sourceType" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to Square." 
+ type: "object" + order: 0 + oneOf: + - title: "Oauth authentication" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API key" + type: "object" + required: + - "auth_type" + - "api_key" + properties: + auth_type: + type: "string" + const: "API Key" + order: 1 + enum: + - "API Key" + api_key: + type: "string" + title: "API key token" + description: "The API key for a Square application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + is_sandbox: + type: "boolean" + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + default: false + order: 1 + start_date: + type: "string" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If not set, all data will be replicated." 
+ title: "Start Date" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + format: "date" + include_deleted_objects: + type: "boolean" + description: + "In some streams there is an option to include deleted objects\ + \ (Items, Categories, Discounts, Taxes)" + title: "Include Deleted Objects" + default: false + order: 3 + sourceType: + title: "square" + const: "square" + enum: + - "square" + order: 0 + type: "string" + source-square-update: + title: "Square Spec" + type: "object" + required: + - "is_sandbox" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to Square." + type: "object" + order: 0 + oneOf: + - title: "Oauth authentication" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + - title: "API key" + type: "object" + required: + - "auth_type" + - "api_key" + properties: + auth_type: + type: "string" + const: "API Key" + order: 1 + enum: + - "API Key" + api_key: + type: "string" + title: "API key token" + description: "The API key for a Square application" + airbyte_secret: true + is_sandbox: + type: "boolean" + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + default: false + order: 1 + start_date: + type: "string" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. 
If not set, all data will be replicated." + title: "Start Date" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + format: "date" + include_deleted_objects: + type: "boolean" + description: + "In some streams there is an option to include deleted objects\ + \ (Items, Categories, Discounts, Taxes)" + title: "Include Deleted Objects" + default: false + order: 3 + source-paystack: + type: "object" + required: + - "start_date" + - "secret_key" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + order: 0 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set, the connector will always reload data from the past\ + \ N days, where N is the value set here. This is useful if your data is\ + \ updated after creation." + order: 1 + secret_key: + type: "string" + title: "Secret Key" + pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$" + description: + "The Paystack API key (usually starts with 'sk_live_'; find\ + \ yours here)." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "paystack" + const: "paystack" + enum: + - "paystack" + order: 0 + type: "string" + source-paystack-update: + type: "object" + required: + - "start_date" + - "secret_key" + properties: + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + order: 0 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set, the connector will always reload data from the past\ + \ N days, where N is the value set here. This is useful if your data is\ + \ updated after creation." + order: 1 + secret_key: + type: "string" + title: "Secret Key" + pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$" + description: + "The Paystack API key (usually starts with 'sk_live_'; find\ + \ yours here)." + airbyte_secret: true + order: 2 + source-redshift: + title: "Redshift Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "sourceType" + properties: + host: + title: "Host" + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)." + type: "string" + order: 1 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + order: 2 + database: + title: "Database" + description: "Name of the database." + type: "string" + examples: + - "master" + order: 3 + schemas: + title: "Schemas" + description: + "The list of schemas to sync from. Specify one or more explicitly\ + \ or keep empty to process all schemas. Schema names are case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + examples: + - "public" + order: 4 + username: + title: "Username" + description: "Username to use to access the database." + type: "string" + order: 5 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 6 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + sourceType: + title: "redshift" + const: "redshift" + enum: + - "redshift" + order: 0 + type: "string" + source-redshift-update: + title: "Redshift Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + properties: + host: + title: "Host" + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)." + type: "string" + order: 1 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + order: 2 + database: + title: "Database" + description: "Name of the database." + type: "string" + examples: + - "master" + order: 3 + schemas: + title: "Schemas" + description: + "The list of schemas to sync from. Specify one or more explicitly\ + \ or keep empty to process all schemas. Schema names are case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + examples: + - "public" + order: 4 + username: + title: "Username" + description: "Username to use to access the database." + type: "string" + order: 5 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 7 + source-productive: + type: "object" + required: + - "api_key" + - "organization_id" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: + "The organization ID which could be seen from `https://app.productive.io/xxxx-xxxx/settings/api-integrations`\ + \ page" + order: 1 + title: "Organization ID" + sourceType: + title: "productive" + const: "productive" + enum: + - "productive" + order: 0 + type: "string" + source-productive-update: + type: "object" + required: + - "api_key" + - "organization_id" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + organization_id: + type: "string" + description: + "The organization ID which could be seen from `https://app.productive.io/xxxx-xxxx/settings/api-integrations`\ + \ page" + order: 1 + title: "Organization ID" + source-survicate: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "survicate" + const: "survicate" + enum: + - "survicate" + order: 0 + type: "string" + source-survicate-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-braintree: + title: "Braintree Spec" + type: "object" + properties: + merchant_id: + title: "Merchant ID" + description: + "The unique identifier for your entire gateway 
account. See\ + \ the docs for more information on how to obtain this ID." + name: "Merchant ID" + type: "string" + public_key: + title: "Public Key" + description: + "Braintree Public Key. See the docs for more information on how to obtain this key." + name: "Public Key" + type: "string" + private_key: + title: "Private Key" + description: + "Braintree Private Key. See the docs for more information on how to obtain this key." + name: "Private Key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + name: "Start Date" + examples: + - "2020" + - "2020-12-30" + - "2020-11-22 20:20:05" + type: "string" + format: "date-time" + environment: + title: "Environment" + description: "Environment specifies where the data will come from." + name: "Environment" + examples: + - "sandbox" + - "production" + - "qa" + - "development" + enum: + - "Development" + - "Sandbox" + - "Qa" + - "Production" + type: "string" + sourceType: + title: "braintree" + const: "braintree" + enum: + - "braintree" + order: 0 + type: "string" + required: + - "merchant_id" + - "public_key" + - "private_key" + - "environment" + - "sourceType" + source-braintree-update: + title: "Braintree Spec" + type: "object" + properties: + merchant_id: + title: "Merchant ID" + description: + "The unique identifier for your entire gateway account. See\ + \ the docs for more information on how to obtain this ID." + name: "Merchant ID" + type: "string" + public_key: + title: "Public Key" + description: + "Braintree Public Key. See the docs for more information on how to obtain this key." + name: "Public Key" + type: "string" + private_key: + title: "Private Key" + description: + "Braintree Private Key. See the docs for more information on how to obtain this key." 
+ name: "Private Key" + airbyte_secret: true + type: "string" + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + name: "Start Date" + examples: + - "2020" + - "2020-12-30" + - "2020-11-22 20:20:05" + type: "string" + format: "date-time" + environment: + title: "Environment" + description: "Environment specifies where the data will come from." + name: "Environment" + examples: + - "sandbox" + - "production" + - "qa" + - "development" + enum: + - "Development" + - "Sandbox" + - "Qa" + - "Production" + type: "string" + required: + - "merchant_id" + - "public_key" + - "private_key" + - "environment" + source-mailchimp: + title: "Mailchimp Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + type: "object" + title: "Authentication" + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "auth_type" + - "access_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "An access token generated using the above client ID\ + \ and secret." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Key" + required: + - "auth_type" + - "apikey" + properties: + auth_type: + type: "string" + const: "apikey" + order: 1 + enum: + - "apikey" + apikey: + type: "string" + title: "API Key" + description: + "Mailchimp API Key. See the docs for information on how to generate this key." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Incremental Sync Start Date" + description: + "The date from which you want to start syncing data for Incremental\ + \ streams. Only records that have been created or modified since this\ + \ date will be synced. If left blank, all data will by synced." + type: "string" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-01-01T00:00:00.000Z" + sourceType: + title: "mailchimp" + const: "mailchimp" + enum: + - "mailchimp" + order: 0 + type: "string" + source-mailchimp-update: + title: "Mailchimp Spec" + type: "object" + required: [] + properties: + credentials: + type: "object" + title: "Authentication" + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "auth_type" + - "access_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "An access token generated using the above client ID\ + \ and secret." + airbyte_secret: true + - type: "object" + title: "API Key" + required: + - "auth_type" + - "apikey" + properties: + auth_type: + type: "string" + const: "apikey" + order: 1 + enum: + - "apikey" + apikey: + type: "string" + title: "API Key" + description: + "Mailchimp API Key. See the docs for information on how to generate this key." + airbyte_secret: true + start_date: + title: "Incremental Sync Start Date" + description: + "The date from which you want to start syncing data for Incremental\ + \ streams. 
Only records that have been created or modified since this\ + \ date will be synced. If left blank, all data will by synced." + type: "string" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-01-01T00:00:00.000Z" + source-airtable: + title: "Airtable Source Spec" + type: "object" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Personal Access Token" + type: "object" + required: + - "api_key" + properties: + auth_method: + type: "string" + const: "api_key" + enum: + - "api_key" + api_key: + type: "string" + description: + "The Personal Access Token for the Airtable account.\ + \ See the Support Guide for more information on how to obtain this token." 
+ title: "Personal Access Token" + airbyte_secret: true + examples: + - "key1234567890" + x-speakeasy-param-sensitive: true + sourceType: + title: "airtable" + const: "airtable" + enum: + - "airtable" + order: 0 + type: "string" + source-airtable-update: + title: "Airtable Source Spec" + type: "object" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + - title: "Personal Access Token" + type: "object" + required: + - "api_key" + properties: + auth_method: + type: "string" + const: "api_key" + enum: + - "api_key" + api_key: + type: "string" + description: + "The Personal Access Token for the Airtable account.\ + \ See the Support Guide for more information on how to obtain this token." + title: "Personal Access Token" + airbyte_secret: true + examples: + - "key1234567890" + source-mssql: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "sourceType" + properties: + host: + description: "The hostname of the database." 
+ title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." 
+ required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 0 + certificate: + title: "Certificate" + type: "string" + description: + "certificate of the server, or of the CA that signed\ + \ the server certificate" + order: 1 + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 3600 + order: 3 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. 
If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 4 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 5 + min: 1000 + max: 10000 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 6 + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + source-mssql-update: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + properties: + host: + description: "The hostname of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 0 + certificate: + title: "Certificate" + type: "string" + description: + "certificate of the server, or of the CA that signed\ + \ the server certificate" + order: 1 + airbyte_secret: true + multiline: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." 
+ required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 3600 + order: 3 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 4 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 5 + min: 1000 + max: 10000 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 6 + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." 
+ required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-dynamodb: + title: "Dynamodb Source Spec" + type: "object" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Access Keys" + type: + - "null" + - "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "User" + order: 0 + enum: + - "User" + access_key_id: + order: 1 + title: "Dynamodb Key Id" + type: "string" + description: + "The access key id to access Dynamodb. 
Airbyte requires\ + \ read permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + x-speakeasy-param-sensitive: true + secret_access_key: + order: 2 + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + x-speakeasy-param-sensitive: true + - type: "object" + title: "Role Based Authentication" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Role" + order: 0 + enum: + - "Role" + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + reserved_attribute_names: + title: "Reserved attribute names" + type: "string" + description: "Comma separated reserved attribute names present in your tables" + airbyte_secret: true + examples: + - "name, field_name, field-name" + x-speakeasy-param-sensitive: true + ignore_missing_read_permissions_tables: + title: "Ignore missing read permissions tables" + type: "boolean" + description: "Ignore tables with missing scan/read permissions" + default: false + sourceType: + title: "dynamodb" + 
const: "dynamodb" + enum: + - "dynamodb" + order: 0 + type: "string" + source-dynamodb-update: + title: "Dynamodb Source Spec" + type: "object" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Access Keys" + type: + - "null" + - "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "User" + order: 0 + enum: + - "User" + access_key_id: + order: 1 + title: "Dynamodb Key Id" + type: "string" + description: + "The access key id to access Dynamodb. Airbyte requires\ + \ read permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + order: 2 + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - type: "object" + title: "Role Based Authentication" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Role" + order: 0 + enum: + - "Role" + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - 
"us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + reserved_attribute_names: + title: "Reserved attribute names" + type: "string" + description: "Comma separated reserved attribute names present in your tables" + airbyte_secret: true + examples: + - "name, field_name, field-name" + ignore_missing_read_permissions_tables: + title: "Ignore missing read permissions tables" + type: "boolean" + description: "Ignore tables with missing scan/read permissions" + default: false + source-kissmetrics: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "kissmetrics" + const: "kissmetrics" + enum: + - "kissmetrics" + order: 0 + type: "string" + source-kissmetrics-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + source-salesforce: + title: "Salesforce Source Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + is_sandbox: + title: "Sandbox" + description: + "Toggle if you're using a Salesforce Sandbox" + type: "boolean" + default: false + order: 1 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Enter your application's Salesforce Refresh 
Token used for Airbyte to access your Salesforce\ + \ account." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\ + \ format. Airbyte will replicate the data updated on and after this date.\ + \ If this field is blank, Airbyte will replicate the data for last two\ + \ years." + type: "string" + pattern: "^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?)$" + pattern_descriptor: "YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ" + examples: + - "2021-07-25" + - "2021-07-25T00:00:00Z" + format: "date-time" + order: 5 + force_use_bulk_api: + title: "Force to use BULK API" + type: "boolean" + description: + "Toggle to use Bulk API (this might cause empty fields for\ + \ some streams)" + default: false + order: 6 + stream_slice_step: + title: "Stream Slice Step for Incremental sync" + type: "string" + description: "The size of the time window (ISO8601 duration) to slice requests." + default: "P30D" + order: 7 + examples: + - "PT12H" + - "P7D" + - "P30D" + - "P1M" + - "P1Y" + streams_criteria: + type: "array" + order: 8 + items: + type: "object" + required: + - "criteria" + - "value" + properties: + criteria: + type: "string" + title: "Search criteria" + enum: + - "starts with" + - "ends with" + - "contains" + - "exacts" + - "starts not with" + - "ends not with" + - "not contains" + - "not exacts" + order: 1 + default: "contains" + value: + type: "string" + title: "Search value" + order: 2 + title: "Filter Salesforce Objects" + description: + "Add filters to select only required stream based on `SObject`\ + \ name. 
Use this field to filter which tables are displayed by this connector.\ + \ This is useful if your Salesforce account has a large number of tables\ + \ (>1000), in which case you may find it easier to navigate the UI and\ + \ speed up the connector's performance if you restrict the tables displayed\ + \ by this connector." + sourceType: + title: "salesforce" + const: "salesforce" + enum: + - "salesforce" + order: 0 + type: "string" + source-salesforce-update: + title: "Salesforce Source Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + is_sandbox: + title: "Sandbox" + description: + "Toggle if you're using a Salesforce Sandbox" + type: "boolean" + default: false + order: 1 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + refresh_token: + title: "Refresh Token" + description: + "Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce\ + \ account." + type: "string" + airbyte_secret: true + order: 4 + start_date: + title: "Start Date" + description: + "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\ + \ format. Airbyte will replicate the data updated on and after this date.\ + \ If this field is blank, Airbyte will replicate the data for last two\ + \ years." 
+ type: "string" + pattern: "^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?)$" + pattern_descriptor: "YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ" + examples: + - "2021-07-25" + - "2021-07-25T00:00:00Z" + format: "date-time" + order: 5 + force_use_bulk_api: + title: "Force to use BULK API" + type: "boolean" + description: + "Toggle to use Bulk API (this might cause empty fields for\ + \ some streams)" + default: false + order: 6 + stream_slice_step: + title: "Stream Slice Step for Incremental sync" + type: "string" + description: "The size of the time window (ISO8601 duration) to slice requests." + default: "P30D" + order: 7 + examples: + - "PT12H" + - "P7D" + - "P30D" + - "P1M" + - "P1Y" + streams_criteria: + type: "array" + order: 8 + items: + type: "object" + required: + - "criteria" + - "value" + properties: + criteria: + type: "string" + title: "Search criteria" + enum: + - "starts with" + - "ends with" + - "contains" + - "exacts" + - "starts not with" + - "ends not with" + - "not contains" + - "not exacts" + order: 1 + default: "contains" + value: + type: "string" + title: "Search value" + order: 2 + title: "Filter Salesforce Objects" + description: + "Add filters to select only required stream based on `SObject`\ + \ name. Use this field to filter which tables are displayed by this connector.\ + \ This is useful if your Salesforce account has a large number of tables\ + \ (>1000), in which case you may find it easier to navigate the UI and\ + \ speed up the connector's performance if you restrict the tables displayed\ + \ by this connector." + source-clickhouse: + title: "ClickHouse Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "sourceType" + properties: + host: + description: "The host endpoint of the Clickhouse cluster." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." 
+ title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "default" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with this username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 6 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "clickhouse" + const: "clickhouse" + enum: + - "clickhouse" + order: 0 + type: "string" + source-clickhouse-update: + title: "ClickHouse Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + description: "The host endpoint of the Clickhouse cluster." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "default" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with this username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." 
+ type: "boolean" + default: true + order: 6 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-postmarkapp: + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + - "sourceType" + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + x-speakeasy-param-sensitive: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "postmarkapp" + const: "postmarkapp" + enum: + - "postmarkapp" + order: 0 + type: 
"string" + source-postmarkapp-update: + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + source-bitly: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "bitly" + const: "bitly" + enum: + - "bitly" + order: 0 + type: "string" + source-bitly-update: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-hardcoded-records: + title: "Hardcoded Records Source Spec" + type: "object" + required: + - "sourceType" + properties: + count: + title: "Count" + description: "How many records per stream should be generated" + type: "integer" + minimum: 1 + default: 1000 + order: 0 + sourceType: + title: "hardcoded-records" + const: "hardcoded-records" + enum: + - "hardcoded-records" + order: 0 + type: 
"string" + source-hardcoded-records-update: + title: "Hardcoded Records Source Spec" + type: "object" + required: [] + properties: + count: + title: "Count" + description: "How many records per stream should be generated" + type: "integer" + minimum: 1 + default: 1000 + order: 0 + source-faker: + title: "Faker Source Spec" + type: "object" + required: + - "sourceType" + properties: + count: + title: "Count" + description: + "How many users should be generated in total. The purchases\ + \ table will be scaled to match, with 10 purchases created per 10 users.\ + \ This setting does not apply to the products stream." + type: "integer" + minimum: 1 + default: 1000 + order: 0 + seed: + title: "Seed" + description: + "Manually control the faker random seed to return the same\ + \ values on subsequent runs (leave -1 for random)" + type: "integer" + default: -1 + order: 1 + records_per_slice: + title: "Records Per Stream Slice" + description: + "How many fake records will be in each page (stream slice),\ + \ before a state message is emitted?" + type: "integer" + minimum: 1 + default: 1000 + order: 2 + always_updated: + title: "Always Updated" + description: + "Should the updated_at values for every record be new each\ + \ sync? Setting this to false will case the source to stop emitting records\ + \ after COUNT records have been emitted." + type: "boolean" + default: true + parallelism: + title: "Parallelism" + description: + "How many parallel workers should we use to generate fake data?\ + \ Choose a value equal to the number of CPUs you will allocate to this\ + \ source." + type: "integer" + minimum: 1 + default: 4 + order: 4 + sourceType: + title: "faker" + const: "faker" + enum: + - "faker" + order: 0 + type: "string" + source-faker-update: + title: "Faker Source Spec" + type: "object" + required: [] + properties: + count: + title: "Count" + description: + "How many users should be generated in total. 
The purchases\ + \ table will be scaled to match, with 10 purchases created per 10 users.\ + \ This setting does not apply to the products stream." + type: "integer" + minimum: 1 + default: 1000 + order: 0 + seed: + title: "Seed" + description: + "Manually control the faker random seed to return the same\ + \ values on subsequent runs (leave -1 for random)" + type: "integer" + default: -1 + order: 1 + records_per_slice: + title: "Records Per Stream Slice" + description: + "How many fake records will be in each page (stream slice),\ + \ before a state message is emitted?" + type: "integer" + minimum: 1 + default: 1000 + order: 2 + always_updated: + title: "Always Updated" + description: + "Should the updated_at values for every record be new each\ + \ sync? Setting this to false will case the source to stop emitting records\ + \ after COUNT records have been emitted." + type: "boolean" + default: true + parallelism: + title: "Parallelism" + description: + "How many parallel workers should we use to generate fake data?\ + \ Choose a value equal to the number of CPUs you will allocate to this\ + \ source." + type: "integer" + minimum: 1 + default: 4 + order: 4 + source-lever-hiring: + title: "Lever Hiring Source Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + order: 3 + title: "Authentication Mechanism" + description: "Choose how to authenticate to Lever Hiring." + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Lever (OAuth)" + required: + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate via Lever (Api Key)" + required: + - "api_key" + properties: + auth_type: + type: "string" + const: "Api Key" + order: 0 + enum: + - "Api Key" + api_key: + title: "Api key" + type: "string" + description: "The Api Key of your Lever Hiring account." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + order: 0 + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. Note that it will be used\ + \ only in the following incremental streams: comments, commits, and issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + environment: + order: 1 + type: "string" + title: "Environment" + description: + "The environment in which you'd like to replicate data for\ + \ Lever. This is used to determine which Lever API endpoint to use." + default: "Sandbox" + enum: + - "Production" + - "Sandbox" + sourceType: + title: "lever-hiring" + const: "lever-hiring" + enum: + - "lever-hiring" + order: 0 + type: "string" + source-lever-hiring-update: + title: "Lever Hiring Source Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + order: 3 + title: "Authentication Mechanism" + description: "Choose how to authenticate to Lever Hiring." + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Lever (OAuth)" + required: + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." 
+ client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining new access token." + airbyte_secret: true + - type: "object" + title: "Authenticate via Lever (Api Key)" + required: + - "api_key" + properties: + auth_type: + type: "string" + const: "Api Key" + order: 0 + enum: + - "Api Key" + api_key: + title: "Api key" + type: "string" + description: "The Api Key of your Lever Hiring account." + airbyte_secret: true + order: 1 + start_date: + order: 0 + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. Note that it will be used\ + \ only in the following incremental streams: comments, commits, and issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + environment: + order: 1 + type: "string" + title: "Environment" + description: + "The environment in which you'd like to replicate data for\ + \ Lever. This is used to determine which Lever API endpoint to use." 
+ default: "Sandbox" + enum: + - "Production" + - "Sandbox" + source-braze: + title: "Braze Spec" + type: "object" + required: + - "url" + - "api_key" + - "start_date" + - "sourceType" + properties: + url: + type: "string" + title: "URL" + description: "Braze REST API endpoint" + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Braze REST API key" + x-speakeasy-param-sensitive: true + start_date: + type: "string" + format: "date" + title: "Start date" + description: "Rows after this date will be synced" + sourceType: + title: "braze" + const: "braze" + enum: + - "braze" + order: 0 + type: "string" + source-braze-update: + title: "Braze Spec" + type: "object" + required: + - "url" + - "api_key" + - "start_date" + properties: + url: + type: "string" + title: "URL" + description: "Braze REST API endpoint" + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Braze REST API key" + start_date: + type: "string" + format: "date" + title: "Start date" + description: "Rows after this date will be synced" + source-sftp: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + - "sourceType" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: "Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: 
+ title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." + type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + sourceType: + title: "sftp" + const: "sftp" + enum: + - "sftp" + order: 0 + type: "string" + source-sftp-update: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: 
"Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." + type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + source-google-drive: + title: "Google Drive Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + folder_url: + title: "Folder Url" + description: + "URL for the folder you want to sync. Using individual streams\ + \ and glob patterns, it's possible to only sync a subset of all files\ + \ located in the folder." 
+ examples: + - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn" + order: 0 + pattern: "^https://drive.google.com/.+" + pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID" + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Google Drive API" + type: "object" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + service_account_info: + title: "Service Account Information" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." 
+ airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "service_account_info" + - "auth_type" + sourceType: + title: "google-drive" + const: "google-drive" + enum: + - "google-drive" + order: 0 + type: "string" + required: + - "streams" + - "folder_url" + - "credentials" + - "sourceType" + source-google-drive-update: + title: "Google Drive Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." 
+ default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." 
+ default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + folder_url: + title: "Folder Url" + description: + "URL for the folder you want to sync. Using individual streams\ + \ and glob patterns, it's possible to only sync a subset of all files\ + \ located in the folder." 
+ examples: + - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn" + order: 0 + pattern: "^https://drive.google.com/.+" + pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID" + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Google Drive API" + type: "object" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token for the Google Drive API" + airbyte_secret: true + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + service_account_info: + title: "Service Account Information" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + type: "string" + required: + - "service_account_info" + - "auth_type" + required: + - "streams" + - "folder_url" + - "credentials" + source-mailjet-sms: + type: "object" + required: + - "token" + - "sourceType" + properties: + end_date: + type: "integer" + title: "End date" + description: + "Retrieve SMS messages created before the specified timestamp.\ + \ Required format - Unix timestamp." 
+ pattern: "^[0-9]*$" + examples: + - 1666281656 + order: 0 + start_date: + type: "integer" + title: "Start date" + description: + "Retrieve SMS messages created after the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666261656 + order: 1 + token: + type: "string" + title: "Access Token" + description: + "Your access token. See here." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "mailjet-sms" + const: "mailjet-sms" + enum: + - "mailjet-sms" + order: 0 + type: "string" + source-mailjet-sms-update: + type: "object" + required: + - "token" + properties: + end_date: + type: "integer" + title: "End date" + description: + "Retrieve SMS messages created before the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666281656 + order: 0 + start_date: + type: "integer" + title: "Start date" + description: + "Retrieve SMS messages created after the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666261656 + order: 1 + token: + type: "string" + title: "Access Token" + description: + "Your access token. See here." 
+ airbyte_secret: true + order: 2 + source-chameleon: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + filter: + type: "string" + description: "Filter for using in the `segments_experiences` stream" + enum: + - "tour" + - "survey" + - "launcher" + order: 3 + title: "Filter" + default: "tour" + end_date: + type: "string" + order: 4 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "chameleon" + const: "chameleon" + enum: + - "chameleon" + order: 0 + type: "string" + source-chameleon-update: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + filter: + type: "string" + description: "Filter for using in the `segments_experiences` stream" + enum: + - "tour" + - "survey" + - "launcher" + order: 3 + title: "Filter" + default: "tour" + end_date: + type: "string" + order: 4 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-gcs: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be\nmodified to uptake the 
changes because it is responsible for\ + \ converting\nlegacy GCS configs into file based configs using the File-Based\ + \ CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." 
+ type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." 
+ default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the 
`hi_res`\ + \ mode. This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + service_account: + title: "Service Account Information" + description: + "Enter your Google Cloud service account key in JSON format" + airbyte_secret: true + order: 0 + type: "string" + x-speakeasy-param-sensitive: true + bucket: + title: "Bucket" + description: "Name of the GCS bucket where the file(s) exist." + order: 2 + type: "string" + sourceType: + title: "gcs" + const: "gcs" + enum: + - "gcs" + order: 0 + type: "string" + required: + - "streams" + - "service_account" + - "bucket" + - "sourceType" + source-gcs-update: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be\nmodified to uptake the changes because it is responsible for\ + \ converting\nlegacy GCS configs into file based configs using the File-Based\ + \ CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the `hi_res`\ + \ mode. 
This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + service_account: + title: "Service Account Information" + description: + "Enter your Google Cloud service account key in JSON format" + airbyte_secret: true + order: 0 + type: "string" + bucket: + title: "Bucket" + description: "Name of the GCS bucket where the file(s) exist." 
+ order: 2 + type: "string" + required: + - "streams" + - "service_account" + - "bucket" + source-basecamp: + type: "object" + required: + - "account_id" + - "start_date" + - "client_id" + - "client_secret" + - "client_refresh_token_2" + - "sourceType" + properties: + account_id: + type: "number" + order: 0 + title: "Account ID" + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_id: + type: "string" + title: "Client ID" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + client_refresh_token_2: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "basecamp" + const: "basecamp" + enum: + - "basecamp" + order: 0 + type: "string" + source-basecamp-update: + type: "object" + required: + - "account_id" + - "start_date" + - "client_id" + - "client_secret" + - "client_refresh_token_2" + properties: + account_id: + type: "number" + order: 0 + title: "Account ID" + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_id: + type: "string" + title: "Client ID" + airbyte_secret: true + order: 2 + client_secret: + type: "string" + title: "Client secret" + airbyte_secret: true + order: 3 + client_refresh_token_2: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 4 + source-qualaroo: + title: "Qualaroo Spec" + type: "object" + required: + - "token" + - "key" + - "start_date" + - "sourceType" + properties: + token: + type: "string" + title: "API token" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + key: + type: "string" + title: "API key" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00.000Z" + survey_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all surveys to which you have access will be\ + \ replicated." + sourceType: + title: "qualaroo" + const: "qualaroo" + enum: + - "qualaroo" + order: 0 + type: "string" + source-qualaroo-update: + title: "Qualaroo Spec" + type: "object" + required: + - "token" + - "key" + - "start_date" + properties: + token: + type: "string" + title: "API token" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + key: + type: "string" + title: "API key" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00.000Z" + survey_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all surveys to which you have access will be\ + \ replicated." 
+ source-nytimes: + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + sourceType: + title: "nytimes" + const: "nytimes" + enum: + - "nytimes" + order: 0 + type: "string" + source-nytimes-update: + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used 
for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + source-greenhouse: + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "greenhouse" + const: "greenhouse" + enum: + - "greenhouse" + order: 0 + type: "string" + source-greenhouse-update: + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + order: 0 + source-front: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + page_limit: + type: "string" + description: "Page limit for the responses" + title: "Page limit" + default: "50" + order: 2 + sourceType: + title: "front" + const: "front" + enum: + - "front" + order: 0 + type: "string" + source-front-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + page_limit: + type: "string" + description: "Page limit for the responses" + title: "Page limit" + default: "50" + order: 2 + trello: + title: null + zendesk-chat: + properties: + credentials: + properties: + client_id: 
+ type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Zendesk Chat Spec" + google-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + title: "Google Ads Spec" + google-search-console: + properties: + authorization: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + title: "Google Search Console Spec" + shopify: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." 
+ airbyte_secret: true + order: 2 + title: "Shopify Source CDK Specifications" + retently: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + title: "Retently Api Spec" + instagram: + properties: + client_id: + title: "Client Id" + description: "The Client ID for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Instagram" + azure-blob-storage: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "SourceAzureBlobStorageSpec" + zendesk-sunshine: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: null + snapchat-marketing: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." 
+ airbyte_secret: true + order: 1 + title: "Snapchat Marketing Spec" + gitlab: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + title: "Source Gitlab Spec" + snowflake: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + title: "Snowflake Source Spec" + microsoft-sharepoint: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft SharePoint Source Spec" + smartsheets: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + title: "Smartsheets Source Spec" + notion: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." 
+ airbyte_secret: true + title: "Notion Source Spec" + slack: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." + airbyte_secret: true + title: "Slack Spec" + youtube-analytics: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + title: "YouTube Analytics Spec" + google-sheets: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." 
+ airbyte_secret: true + title: "Google Sheets Source Spec" + zendesk-talk: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + title: "Source Zendesk Talk Spec" + asana: + properties: + credentials: + properties: + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + title: "Asana Spec" + microsoft-teams: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Microsoft Teams developer application." + airbyte_secret: true + title: "Microsoft Teams Spec" + amazon-seller-partner: + properties: + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + title: "Amazon Seller Partner Spec" + linkedin-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." 
+ airbyte_secret: true + title: "Linkedin Ads Spec" + pinterest: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Pinterest Spec" + zendesk-support: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + title: "Source Zendesk Support Spec" + microsoft-onedrive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft OneDrive Source Spec" + tiktok-marketing: + properties: + credentials: + properties: + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + title: "TikTok Marketing Source Spec" + hubspot: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application. See\ + \ the Hubspot docs if you need help finding this ID." 
+ type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + title: "HubSpot Source Spec" + google-analytics-data-api: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Analytics developer application." + airbyte_secret: true + order: 2 + title: "Google Analytics (Data API) Spec" + intercom: + properties: + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + title: "Source Intercom Spec" + typeform: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." 
+ airbyte_secret: true + title: null + facebook-marketing: + properties: + credentials: + properties: + client_id: + title: "Client Id" + description: "The Client Id for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Facebook Marketing" + surveymonkey: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + title: null + bing-ads: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + title: "Bing Ads Spec" + monday: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Monday Spec" + amazon-ads: + properties: + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." 
+ order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + title: "Amazon Ads Spec" + github: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + title: "GitHub Source Spec" + square: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + title: "Square Spec" + mailchimp: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Mailchimp Spec" + airtable: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." 
+ airbyte_secret: true + title: "Airtable Source Spec" + salesforce: + properties: + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + title: "Salesforce Source Spec" + lever-hiring: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + title: "Lever Hiring Source Spec" + google-drive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + title: "Google Drive Source Spec" + OAuthCredentialsConfiguration: + description: The values required to configure the source. 
+ example: { user: "charles" } + oneOf: + - title: airtable + $ref: "#/components/schemas/airtable" + - title: amazon-ads + $ref: "#/components/schemas/amazon-ads" + - title: amazon-seller-partner + $ref: "#/components/schemas/amazon-seller-partner" + - title: asana + $ref: "#/components/schemas/asana" + - title: azure-blob-storage + $ref: "#/components/schemas/azure-blob-storage" + - title: bing-ads + $ref: "#/components/schemas/bing-ads" + - title: facebook-marketing + $ref: "#/components/schemas/facebook-marketing" + - title: github + $ref: "#/components/schemas/github" + - title: gitlab + $ref: "#/components/schemas/gitlab" + - title: google-ads + $ref: "#/components/schemas/google-ads" + - title: google-analytics-data-api + $ref: "#/components/schemas/google-analytics-data-api" + - title: google-drive + $ref: "#/components/schemas/google-drive" + - title: google-search-console + $ref: "#/components/schemas/google-search-console" + - title: google-sheets + $ref: "#/components/schemas/google-sheets" + - title: hubspot + $ref: "#/components/schemas/hubspot" + - title: instagram + $ref: "#/components/schemas/instagram" + - title: intercom + $ref: "#/components/schemas/intercom" + - title: lever-hiring + $ref: "#/components/schemas/lever-hiring" + - title: linkedin-ads + $ref: "#/components/schemas/linkedin-ads" + - title: mailchimp + $ref: "#/components/schemas/mailchimp" + - title: microsoft-onedrive + $ref: "#/components/schemas/microsoft-onedrive" + - title: microsoft-sharepoint + $ref: "#/components/schemas/microsoft-sharepoint" + - title: microsoft-teams + $ref: "#/components/schemas/microsoft-teams" + - title: monday + $ref: "#/components/schemas/monday" + - title: notion + $ref: "#/components/schemas/notion" + - title: pinterest + $ref: "#/components/schemas/pinterest" + - title: retently + $ref: "#/components/schemas/retently" + - title: salesforce + $ref: "#/components/schemas/salesforce" + - title: shopify + $ref: "#/components/schemas/shopify" + - 
title: slack + $ref: "#/components/schemas/slack" + - title: smartsheets + $ref: "#/components/schemas/smartsheets" + - title: snapchat-marketing + $ref: "#/components/schemas/snapchat-marketing" + - title: snowflake + $ref: "#/components/schemas/snowflake" + - title: square + $ref: "#/components/schemas/square" + - title: surveymonkey + $ref: "#/components/schemas/surveymonkey" + - title: tiktok-marketing + $ref: "#/components/schemas/tiktok-marketing" + - title: trello + $ref: "#/components/schemas/trello" + - title: typeform + $ref: "#/components/schemas/typeform" + - title: youtube-analytics + $ref: "#/components/schemas/youtube-analytics" + - title: zendesk-chat + $ref: "#/components/schemas/zendesk-chat" + - title: zendesk-sunshine + $ref: "#/components/schemas/zendesk-sunshine" + - title: zendesk-support + $ref: "#/components/schemas/zendesk-support" + - title: zendesk-talk + $ref: "#/components/schemas/zendesk-talk" + SourceConfiguration: + description: The values required to configure the source. 
+ example: { user: "charles" } + oneOf: + - title: source-aha + $ref: "#/components/schemas/source-aha" + - title: source-7shifts + $ref: "#/components/schemas/source-7shifts" + - title: source-airbyte + $ref: "#/components/schemas/source-airbyte" + - title: source-aircall + $ref: "#/components/schemas/source-aircall" + - title: source-airtable + $ref: "#/components/schemas/source-airtable" + - title: source-algolia + $ref: "#/components/schemas/source-algolia" + - title: source-amazon-ads + $ref: "#/components/schemas/source-amazon-ads" + - title: source-amazon-seller-partner + $ref: "#/components/schemas/source-amazon-seller-partner" + - title: source-amazon-sqs + $ref: "#/components/schemas/source-amazon-sqs" + - title: source-amplitude + $ref: "#/components/schemas/source-amplitude" + - title: source-apify-dataset + $ref: "#/components/schemas/source-apify-dataset" + - title: source-appcues + $ref: "#/components/schemas/source-appcues" + - title: source-appfigures + $ref: "#/components/schemas/source-appfigures" + - title: source-appfollow + $ref: "#/components/schemas/source-appfollow" + - title: source-asana + $ref: "#/components/schemas/source-asana" + - title: source-auth0 + $ref: "#/components/schemas/source-auth0" + - title: source-aws-cloudtrail + $ref: "#/components/schemas/source-aws-cloudtrail" + - title: source-azure-blob-storage + $ref: "#/components/schemas/source-azure-blob-storage" + - title: source-azure-table + $ref: "#/components/schemas/source-azure-table" + - title: source-bamboo-hr + $ref: "#/components/schemas/source-bamboo-hr" + - title: source-basecamp + $ref: "#/components/schemas/source-basecamp" + - title: source-beamer + $ref: "#/components/schemas/source-beamer" + - title: source-bigquery + $ref: "#/components/schemas/source-bigquery" + - title: source-bing-ads + $ref: "#/components/schemas/source-bing-ads" + - title: source-bitly + $ref: "#/components/schemas/source-bitly" + - title: source-braintree + $ref: 
"#/components/schemas/source-braintree" + - title: source-braze + $ref: "#/components/schemas/source-braze" + - title: source-breezy-hr + $ref: "#/components/schemas/source-breezy-hr" + - title: source-brevo + $ref: "#/components/schemas/source-brevo" + - title: source-buildkite + $ref: "#/components/schemas/source-buildkite" + - title: source-buzzsprout + $ref: "#/components/schemas/source-buzzsprout" + - title: source-calendly + $ref: "#/components/schemas/source-calendly" + - title: source-canny + $ref: "#/components/schemas/source-canny" + - title: source-cart + $ref: "#/components/schemas/source-cart" + - title: source-chameleon + $ref: "#/components/schemas/source-chameleon" + - title: source-chargebee + $ref: "#/components/schemas/source-chargebee" + - title: source-chartmogul + $ref: "#/components/schemas/source-chartmogul" + - title: source-cimis + $ref: "#/components/schemas/source-cimis" + - title: source-clazar + $ref: "#/components/schemas/source-clazar" + - title: source-clickhouse + $ref: "#/components/schemas/source-clickhouse" + - title: source-clickup-api + $ref: "#/components/schemas/source-clickup-api" + - title: source-clockify + $ref: "#/components/schemas/source-clockify" + - title: source-close-com + $ref: "#/components/schemas/source-close-com" + - title: source-coda + $ref: "#/components/schemas/source-coda" + - title: source-coin-api + $ref: "#/components/schemas/source-coin-api" + - title: source-coinmarketcap + $ref: "#/components/schemas/source-coinmarketcap" + - title: source-configcat + $ref: "#/components/schemas/source-configcat" + - title: source-confluence + $ref: "#/components/schemas/source-confluence" + - title: source-convex + $ref: "#/components/schemas/source-convex" + - title: source-customer-io + $ref: "#/components/schemas/source-customer-io" + - title: source-datadog + $ref: "#/components/schemas/source-datadog" + - title: source-datascope + $ref: "#/components/schemas/source-datascope" + - title: source-dbt + $ref: 
"#/components/schemas/source-dbt" + - title: source-delighted + $ref: "#/components/schemas/source-delighted" + - title: source-dixa + $ref: "#/components/schemas/source-dixa" + - title: source-dockerhub + $ref: "#/components/schemas/source-dockerhub" + - title: source-dremio + $ref: "#/components/schemas/source-dremio" + - title: source-dropbox-sign + $ref: "#/components/schemas/source-dropbox-sign" + - title: source-dynamodb + $ref: "#/components/schemas/source-dynamodb" + - title: source-emailoctopus + $ref: "#/components/schemas/source-emailoctopus" + - title: source-eventbrite + $ref: "#/components/schemas/source-eventbrite" + - title: source-exchange-rates + $ref: "#/components/schemas/source-exchange-rates" + - title: source-ezofficeinventory + $ref: "#/components/schemas/source-ezofficeinventory" + - title: source-facebook-marketing + $ref: "#/components/schemas/source-facebook-marketing" + - title: source-faker + $ref: "#/components/schemas/source-faker" + - title: source-fauna + $ref: "#/components/schemas/source-fauna" + - title: source-file + $ref: "#/components/schemas/source-file" + - title: source-firebolt + $ref: "#/components/schemas/source-firebolt" + - title: source-fleetio + $ref: "#/components/schemas/source-fleetio" + - title: source-freshcaller + $ref: "#/components/schemas/source-freshcaller" + - title: source-freshchat + $ref: "#/components/schemas/source-freshchat" + - title: source-freshdesk + $ref: "#/components/schemas/source-freshdesk" + - title: source-freshsales + $ref: "#/components/schemas/source-freshsales" + - title: source-front + $ref: "#/components/schemas/source-front" + - title: source-gainsight-px + $ref: "#/components/schemas/source-gainsight-px" + - title: source-gcs + $ref: "#/components/schemas/source-gcs" + - title: source-getlago + $ref: "#/components/schemas/source-getlago" + - title: source-github + $ref: "#/components/schemas/source-github" + - title: source-gitlab + $ref: "#/components/schemas/source-gitlab" + - 
title: source-glassfrog + $ref: "#/components/schemas/source-glassfrog" + - title: source-gnews + $ref: "#/components/schemas/source-gnews" + - title: source-goldcast + $ref: "#/components/schemas/source-goldcast" + - title: source-google-ads + $ref: "#/components/schemas/source-google-ads" + - title: source-google-analytics-data-api + $ref: "#/components/schemas/source-google-analytics-data-api" + - title: source-google-directory + $ref: "#/components/schemas/source-google-directory" + - title: source-google-drive + $ref: "#/components/schemas/source-google-drive" + - title: source-google-pagespeed-insights + $ref: "#/components/schemas/source-google-pagespeed-insights" + - title: source-google-search-console + $ref: "#/components/schemas/source-google-search-console" + - title: source-google-sheets + $ref: "#/components/schemas/source-google-sheets" + - title: source-google-tasks + $ref: "#/components/schemas/source-google-tasks" + - title: source-google-webfonts + $ref: "#/components/schemas/source-google-webfonts" + - title: source-greenhouse + $ref: "#/components/schemas/source-greenhouse" + - title: source-gridly + $ref: "#/components/schemas/source-gridly" + - title: source-guru + $ref: "#/components/schemas/source-guru" + - title: source-hardcoded-records + $ref: "#/components/schemas/source-hardcoded-records" + - title: source-harvest + $ref: "#/components/schemas/source-harvest" + - title: source-height + $ref: "#/components/schemas/source-height" + - title: source-hibob + $ref: "#/components/schemas/source-hibob" + - title: source-high-level + $ref: "#/components/schemas/source-high-level" + - title: source-hubplanner + $ref: "#/components/schemas/source-hubplanner" + - title: source-hubspot + $ref: "#/components/schemas/source-hubspot" + - title: source-insightly + $ref: "#/components/schemas/source-insightly" + - title: source-instagram + $ref: "#/components/schemas/source-instagram" + - title: source-instatus + $ref: 
"#/components/schemas/source-instatus" + - title: source-intercom + $ref: "#/components/schemas/source-intercom" + - title: source-ip2whois + $ref: "#/components/schemas/source-ip2whois" + - title: source-iterable + $ref: "#/components/schemas/source-iterable" + - title: source-jira + $ref: "#/components/schemas/source-jira" + - title: source-jotform + $ref: "#/components/schemas/source-jotform" + - title: source-k6-cloud + $ref: "#/components/schemas/source-k6-cloud" + - title: source-kissmetrics + $ref: "#/components/schemas/source-kissmetrics" + - title: source-klarna + $ref: "#/components/schemas/source-klarna" + - title: source-klaviyo + $ref: "#/components/schemas/source-klaviyo" + - title: source-kyve + $ref: "#/components/schemas/source-kyve" + - title: source-launchdarkly + $ref: "#/components/schemas/source-launchdarkly" + - title: source-leadfeeder + $ref: "#/components/schemas/source-leadfeeder" + - title: source-lemlist + $ref: "#/components/schemas/source-lemlist" + - title: source-lever-hiring + $ref: "#/components/schemas/source-lever-hiring" + - title: source-linkedin-ads + $ref: "#/components/schemas/source-linkedin-ads" + - title: source-linkedin-pages + $ref: "#/components/schemas/source-linkedin-pages" + - title: source-linnworks + $ref: "#/components/schemas/source-linnworks" + - title: source-lob + $ref: "#/components/schemas/source-lob" + - title: source-lokalise + $ref: "#/components/schemas/source-lokalise" + - title: source-looker + $ref: "#/components/schemas/source-looker" + - title: source-luma + $ref: "#/components/schemas/source-luma" + - title: source-mailchimp + $ref: "#/components/schemas/source-mailchimp" + - title: source-mailgun + $ref: "#/components/schemas/source-mailgun" + - title: source-mailjet-sms + $ref: "#/components/schemas/source-mailjet-sms" + - title: source-marketo + $ref: "#/components/schemas/source-marketo" + - title: source-metabase + $ref: "#/components/schemas/source-metabase" + - title: 
source-microsoft-onedrive + $ref: "#/components/schemas/source-microsoft-onedrive" + - title: source-microsoft-sharepoint + $ref: "#/components/schemas/source-microsoft-sharepoint" + - title: source-microsoft-teams + $ref: "#/components/schemas/source-microsoft-teams" + - title: source-mixpanel + $ref: "#/components/schemas/source-mixpanel" + - title: source-monday + $ref: "#/components/schemas/source-monday" + - title: source-mongodb-v2 + $ref: "#/components/schemas/source-mongodb-v2" + - title: source-mssql + $ref: "#/components/schemas/source-mssql" + - title: source-my-hours + $ref: "#/components/schemas/source-my-hours" + - title: source-mysql + $ref: "#/components/schemas/source-mysql" + - title: source-netsuite + $ref: "#/components/schemas/source-netsuite" + - title: source-northpass-lms + $ref: "#/components/schemas/source-northpass-lms" + - title: source-notion + $ref: "#/components/schemas/source-notion" + - title: source-nylas + $ref: "#/components/schemas/source-nylas" + - title: source-nytimes + $ref: "#/components/schemas/source-nytimes" + - title: source-okta + $ref: "#/components/schemas/source-okta" + - title: source-omnisend + $ref: "#/components/schemas/source-omnisend" + - title: source-onesignal + $ref: "#/components/schemas/source-onesignal" + - title: source-oracle + $ref: "#/components/schemas/source-oracle" + - title: source-orb + $ref: "#/components/schemas/source-orb" + - title: source-orbit + $ref: "#/components/schemas/source-orbit" + - title: source-outbrain-amplify + $ref: "#/components/schemas/source-outbrain-amplify" + - title: source-outreach + $ref: "#/components/schemas/source-outreach" + - title: source-paypal-transaction + $ref: "#/components/schemas/source-paypal-transaction" + - title: source-paystack + $ref: "#/components/schemas/source-paystack" + - title: source-pendo + $ref: "#/components/schemas/source-pendo" + - title: source-pennylane + $ref: "#/components/schemas/source-pennylane" + - title: source-persistiq + $ref: 
"#/components/schemas/source-persistiq" + - title: source-pexels-api + $ref: "#/components/schemas/source-pexels-api" + - title: source-picqer + $ref: "#/components/schemas/source-picqer" + - title: source-pinterest + $ref: "#/components/schemas/source-pinterest" + - title: source-pipedrive + $ref: "#/components/schemas/source-pipedrive" + - title: source-piwik + $ref: "#/components/schemas/source-piwik" + - title: source-planhat + $ref: "#/components/schemas/source-planhat" + - title: source-pocket + $ref: "#/components/schemas/source-pocket" + - title: source-pokeapi + $ref: "#/components/schemas/source-pokeapi" + - title: source-polygon-stock-api + $ref: "#/components/schemas/source-polygon-stock-api" + - title: source-postgres + $ref: "#/components/schemas/source-postgres" + - title: source-posthog + $ref: "#/components/schemas/source-posthog" + - title: source-postmarkapp + $ref: "#/components/schemas/source-postmarkapp" + - title: source-prestashop + $ref: "#/components/schemas/source-prestashop" + - title: source-productboard + $ref: "#/components/schemas/source-productboard" + - title: source-productive + $ref: "#/components/schemas/source-productive" + - title: source-pypi + $ref: "#/components/schemas/source-pypi" + - title: source-qualaroo + $ref: "#/components/schemas/source-qualaroo" + - title: source-railz + $ref: "#/components/schemas/source-railz" + - title: source-recharge + $ref: "#/components/schemas/source-recharge" + - title: source-recreation + $ref: "#/components/schemas/source-recreation" + - title: source-recruitee + $ref: "#/components/schemas/source-recruitee" + - title: source-recurly + $ref: "#/components/schemas/source-recurly" + - title: source-reddit + $ref: "#/components/schemas/source-reddit" + - title: source-redshift + $ref: "#/components/schemas/source-redshift" + - title: source-retently + $ref: "#/components/schemas/source-retently" + - title: source-rki-covid + $ref: "#/components/schemas/source-rki-covid" + - title: 
source-rollbar + $ref: "#/components/schemas/source-rollbar" + - title: source-rss + $ref: "#/components/schemas/source-rss" + - title: source-s3 + $ref: "#/components/schemas/source-s3" + - title: source-salesforce + $ref: "#/components/schemas/source-salesforce" + - title: source-salesloft + $ref: "#/components/schemas/source-salesloft" + - title: source-sap-fieldglass + $ref: "#/components/schemas/source-sap-fieldglass" + - title: source-savvycal + $ref: "#/components/schemas/source-savvycal" + - title: source-scryfall + $ref: "#/components/schemas/source-scryfall" + - title: source-secoda + $ref: "#/components/schemas/source-secoda" + - title: source-sendgrid + $ref: "#/components/schemas/source-sendgrid" + - title: source-sendinblue + $ref: "#/components/schemas/source-sendinblue" + - title: source-senseforce + $ref: "#/components/schemas/source-senseforce" + - title: source-sentry + $ref: "#/components/schemas/source-sentry" + - title: source-sftp + $ref: "#/components/schemas/source-sftp" + - title: source-sftp-bulk + $ref: "#/components/schemas/source-sftp-bulk" + - title: source-shopify + $ref: "#/components/schemas/source-shopify" + - title: source-shortcut + $ref: "#/components/schemas/source-shortcut" + - title: source-shortio + $ref: "#/components/schemas/source-shortio" + - title: source-slack + $ref: "#/components/schemas/source-slack" + - title: source-smaily + $ref: "#/components/schemas/source-smaily" + - title: source-smartengage + $ref: "#/components/schemas/source-smartengage" + - title: source-smartsheets + $ref: "#/components/schemas/source-smartsheets" + - title: source-snapchat-marketing + $ref: "#/components/schemas/source-snapchat-marketing" + - title: source-snowflake + $ref: "#/components/schemas/source-snowflake" + - title: source-sonar-cloud + $ref: "#/components/schemas/source-sonar-cloud" + - title: source-spacex-api + $ref: "#/components/schemas/source-spacex-api" + - title: source-split-io + $ref: 
"#/components/schemas/source-split-io" + - title: source-square + $ref: "#/components/schemas/source-square" + - title: source-strava + $ref: "#/components/schemas/source-strava" + - title: source-stripe + $ref: "#/components/schemas/source-stripe" + - title: source-survey-sparrow + $ref: "#/components/schemas/source-survey-sparrow" + - title: source-surveymonkey + $ref: "#/components/schemas/source-surveymonkey" + - title: source-survicate + $ref: "#/components/schemas/source-survicate" + - title: source-teamwork + $ref: "#/components/schemas/source-teamwork" + - title: source-tempo + $ref: "#/components/schemas/source-tempo" + - title: source-the-guardian-api + $ref: "#/components/schemas/source-the-guardian-api" + - title: source-tiktok-marketing + $ref: "#/components/schemas/source-tiktok-marketing" + - title: source-trello + $ref: "#/components/schemas/source-trello" + - title: source-trustpilot + $ref: "#/components/schemas/source-trustpilot" + - title: source-tvmaze-schedule + $ref: "#/components/schemas/source-tvmaze-schedule" + - title: source-twilio + $ref: "#/components/schemas/source-twilio" + - title: source-twilio-taskrouter + $ref: "#/components/schemas/source-twilio-taskrouter" + - title: source-twitter + $ref: "#/components/schemas/source-twitter" + - title: source-typeform + $ref: "#/components/schemas/source-typeform" + - title: source-us-census + $ref: "#/components/schemas/source-us-census" + - title: source-vantage + $ref: "#/components/schemas/source-vantage" + - title: source-vwo + $ref: "#/components/schemas/source-vwo" + - title: source-webflow + $ref: "#/components/schemas/source-webflow" + - title: source-when-i-work + $ref: "#/components/schemas/source-when-i-work" + - title: source-whisky-hunter + $ref: "#/components/schemas/source-whisky-hunter" + - title: source-wikipedia-pageviews + $ref: "#/components/schemas/source-wikipedia-pageviews" + - title: source-woocommerce + $ref: "#/components/schemas/source-woocommerce" + - title: 
source-xkcd + $ref: "#/components/schemas/source-xkcd" + - title: source-yandex-metrica + $ref: "#/components/schemas/source-yandex-metrica" + - title: source-yotpo + $ref: "#/components/schemas/source-yotpo" + - title: source-youtube-analytics + $ref: "#/components/schemas/source-youtube-analytics" + - title: source-zendesk-chat + $ref: "#/components/schemas/source-zendesk-chat" + - title: source-zendesk-sunshine + $ref: "#/components/schemas/source-zendesk-sunshine" + - title: source-zendesk-support + $ref: "#/components/schemas/source-zendesk-support" + - title: source-zendesk-talk + $ref: "#/components/schemas/source-zendesk-talk" + - title: source-zenloop + $ref: "#/components/schemas/source-zenloop" + - title: source-zoho-crm + $ref: "#/components/schemas/source-zoho-crm" + - title: source-zoom + $ref: "#/components/schemas/source-zoom" + DestinationConfiguration: + description: The values required to configure the destination. + example: { user: "charles" } + InitiateOauthRequest: + title: Root Type for initiate-oauth-post-body + description: POST body for initiating OAuth via the public API + required: + - redirectUrl + - workspaceId + - sourceType + type: object + example: + redirectUrl: "https://cloud.airbyte.io/v1/api/oauth/callback" + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + destinationId: 3d93b16c-ff5f-421c-8908-5a3c82088f14 + properties: + redirectUrl: + description: >- + The URL to redirect the user to with the OAuth secret stored in the secret_id query + string parameter after authentication is complete. + type: string + workspaceId: + format: uuid + description: The workspace to create the secret and eventually the full source. + type: string + oAuthInputConfiguration: + $ref: "#/components/schemas/OAuthInputConfiguration" + description: Input configuration for OAuth required by some sources. 
+ sourceType: + $ref: "#/components/schemas/OAuthActorNames" + WorkspaceOAuthCredentialsRequest: + title: "Root Type for WorkspaceOAuthCredentials" + description: "POST body for creating/updating workspace level OAuth credentials" + required: + - "actorType" + - "name" + - "configuration" + type: "object" + properties: + actorType: + $ref: "#/components/schemas/ActorTypeEnum" + name: + $ref: "#/components/schemas/OAuthActorNames" + configuration: + $ref: "#/components/schemas/OAuthCredentialsConfiguration" + OAuthActorNames: + enum: + - airtable + - amazon-ads + - amazon-seller-partner + - asana + - azure-blob-storage + - bing-ads + - facebook-marketing + - github + - gitlab + - google-ads + - google-analytics-data-api + - google-drive + - google-search-console + - google-sheets + - hubspot + - instagram + - intercom + - lever-hiring + - linkedin-ads + - mailchimp + - microsoft-onedrive + - microsoft-sharepoint + - microsoft-teams + - monday + - notion + - pinterest + - retently + - salesforce + - slack + - smartsheets + - snapchat-marketing + - snowflake + - square + - surveymonkey + - tiktok-marketing + - trello + - typeform + - youtube-analytics + - zendesk-chat + - zendesk-sunshine + - zendesk-support + - zendesk-talk + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml new file mode 100644 index 00000000000..cb709028573 --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_streams.yaml @@ -0,0 +1,1149 @@ +--- +openapi: "3.1.0" +info: + title: "Streams" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." 
+servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /streams: + get: + tags: + - "public_streams" + - "public" + - "Streams" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/StreamPropertiesResponse" + description: + "Get the available streams properties for a source/destination\ + \ pair." + "400": + description: "Required parameters are missing" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getStreamProperties" + x-speakeasy-alias: "getStreamProperties" + x-speakeasy-group: "Streams" + summary: "Get stream properties" + parameters: + - name: "sourceId" + description: "ID of the source" + schema: + format: "UUID" + type: "string" + in: "query" + required: true + - name: "destinationId" + description: "ID of the destination" + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "ignoreCache" + description: + "If true pull the latest schema from the source, else pull from\ + \ cache (default false)" + schema: + type: "boolean" + default: false + in: "query" + required: false +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." 
+ x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml new file mode 100644 index 00000000000..60c1bffccd1 --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_users.yaml @@ -0,0 +1,1147 @@ +--- +openapi: "3.1.0" +info: + title: "Users" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /users: + get: + tags: + - "public_users" + - "public" + - "Users" + summary: "List all users within an organization" + description: + "Organization Admin user can list all users within the same organization.\ + \ Also provide filtering on a list of user IDs or/and a list of user emails." + parameters: + - in: "query" + name: "organizationId" + schema: + type: "string" + format: "UUID" + required: true + - in: "query" + name: "ids" + schema: + type: "array" + items: + type: "string" + format: "UUID" + description: "List of user IDs to filter by" + - in: "query" + name: "emails" + schema: + type: "array" + items: + type: "string" + format: "email" + description: "List of user emails to filter by" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/UsersResponse" + description: "List Users." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listUsersWithinAnOrganization" + x-speakeasy-alias: "listUsersWithinAnOrganization" + x-speakeasy-group: "Users" +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. 
If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." + x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml b/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml new file mode 100644 index 00000000000..5fed94153f6 --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_documentation_workspaces.yaml @@ -0,0 +1,2196 @@ +--- +openapi: "3.1.0" +info: + title: "Workspaces" + version: "1.0.0" + description: "Programmatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /workspaces: + get: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspacesResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listWorkspaces" + x-speakeasy-alias: "listWorkspaces" + x-speakeasy-group: "Workspaces" + summary: "List workspaces" + x-speakeasy-entity-operation: "Workspace#list" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to fetch. Empty list will\ + \ retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted workspaces in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of workspaces returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning workspaces. 
The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceCreateRequest" + examples: + Workspace Creation Request Example: + value: + name: "Company Workspace Name" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Creation Response Example: + value: + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createWorkspace" + x-speakeasy-alias: "createWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Create a workspace" + x-speakeasy-entity-operation: "Workspace#create" + /workspaces/{workspaceId}: + parameters: + - name: "workspaceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + get: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + description: "Get a Workspace by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getWorkspace" + x-speakeasy-alias: "getWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Get Workspace details" + x-speakeasy-entity-operation: "Workspace#read" + patch: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceUpdateRequest" + examples: + Workspace Update Request Example: + value: + name: "Company Workspace Name" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Update Response Example: + value: + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "updateWorkspace" + x-speakeasy-alias: "updateWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Update a workspace" + x-speakeasy-entity-operation: "Workspace#update" + delete: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteWorkspace" + x-speakeasy-alias: "deleteWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Delete a Workspace" + x-speakeasy-entity-operation: "Workspace#delete" + /workspaces/{workspaceId}/oauthCredentials: + put: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceOAuthCredentialsRequest" + required: true + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + description: "OAuth credential override was successful." + "400": + description: "A field in the body has not been set appropriately." + "403": + description: "API key is invalid." 
+ operationId: "createOrUpdateWorkspaceOAuthCredentials" + x-speakeasy-alias: "createOrUpdateWorkspaceOAuthCredentials" + x-speakeasy-group: "Workspaces" + summary: "Create OAuth override credentials for a workspace and source type." + description: + "Create/update a set of OAuth credentials to override the Airbyte-provided\ + \ OAuth credentials used for source/destination OAuth.\nIn order to determine\ + \ what the credential configuration needs to be, please see the connector\ + \ specification of the relevant source/destination." + parameters: + - name: "workspaceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." + x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." 
+ enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." + type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." 
+ x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single job." 
+ required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + trello: + title: null + zendesk-chat: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." 
+ airbyte_secret: true + title: "Zendesk Chat Spec" + google-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + title: "Google Ads Spec" + google-search-console: + properties: + authorization: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + title: "Google Search Console Spec" + shopify: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + title: "Shopify Source CDK Specifications" + retently: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." 
+ client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + title: "Retently Api Spec" + instagram: + properties: + client_id: + title: "Client Id" + description: "The Client ID for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Instagram" + azure-blob-storage: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "SourceAzureBlobStorageSpec" + zendesk-sunshine: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: null + snapchat-marketing: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + title: "Snapchat Marketing Spec" + gitlab: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." 
+ airbyte_secret: true + title: "Source Gitlab Spec" + snowflake: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + title: "Snowflake Source Spec" + microsoft-sharepoint: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft SharePoint Source Spec" + smartsheets: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + title: "Smartsheets Source Spec" + notion: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." + airbyte_secret: true + title: "Notion Source Spec" + slack: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. 
See our docs if you need help finding this secret." + airbyte_secret: true + title: "Slack Spec" + youtube-analytics: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + title: "YouTube Analytics Spec" + google-sheets: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + title: "Google Sheets Source Spec" + zendesk-talk: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + title: "Source Zendesk Talk Spec" + asana: + properties: + credentials: + properties: + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + title: "Asana Spec" + microsoft-teams: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Microsoft Teams developer application." 
+ airbyte_secret: true + title: "Microsoft Teams Spec" + amazon-seller-partner: + properties: + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + title: "Amazon Seller Partner Spec" + linkedin-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + title: "Linkedin Ads Spec" + pinterest: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Pinterest Spec" + zendesk-support: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." 
+ airbyte_secret: true + title: "Source Zendesk Support Spec" + microsoft-onedrive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft OneDrive Source Spec" + tiktok-marketing: + properties: + credentials: + properties: + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + title: "TikTok Marketing Source Spec" + hubspot: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application. See\ + \ the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + title: "HubSpot Source Spec" + google-analytics-data-api: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Analytics developer application." + airbyte_secret: true + order: 2 + title: "Google Analytics (Data API) Spec" + intercom: + properties: + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." 
+ airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + title: "Source Intercom Spec" + typeform: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + title: null + facebook-marketing: + properties: + credentials: + properties: + client_id: + title: "Client Id" + description: "The Client Id for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Facebook Marketing" + surveymonkey: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + title: null + bing-ads: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + title: "Bing Ads Spec" + monday: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." 
+ airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Monday Spec" + amazon-ads: + properties: + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + title: "Amazon Ads Spec" + github: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + title: "GitHub Source Spec" + square: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + title: "Square Spec" + mailchimp: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Mailchimp Spec" + airtable: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." 
+ airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + title: "Airtable Source Spec" + salesforce: + properties: + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + title: "Salesforce Source Spec" + lever-hiring: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + title: "Lever Hiring Source Spec" + google-drive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + title: "Google Drive Source Spec" + OAuthCredentialsConfiguration: + description: The values required to configure the source. 
+ example: { user: "charles" } + oneOf: + - title: airtable + $ref: "#/components/schemas/airtable" + - title: amazon-ads + $ref: "#/components/schemas/amazon-ads" + - title: amazon-seller-partner + $ref: "#/components/schemas/amazon-seller-partner" + - title: asana + $ref: "#/components/schemas/asana" + - title: azure-blob-storage + $ref: "#/components/schemas/azure-blob-storage" + - title: bing-ads + $ref: "#/components/schemas/bing-ads" + - title: facebook-marketing + $ref: "#/components/schemas/facebook-marketing" + - title: github + $ref: "#/components/schemas/github" + - title: gitlab + $ref: "#/components/schemas/gitlab" + - title: google-ads + $ref: "#/components/schemas/google-ads" + - title: google-analytics-data-api + $ref: "#/components/schemas/google-analytics-data-api" + - title: google-drive + $ref: "#/components/schemas/google-drive" + - title: google-search-console + $ref: "#/components/schemas/google-search-console" + - title: google-sheets + $ref: "#/components/schemas/google-sheets" + - title: hubspot + $ref: "#/components/schemas/hubspot" + - title: instagram + $ref: "#/components/schemas/instagram" + - title: intercom + $ref: "#/components/schemas/intercom" + - title: lever-hiring + $ref: "#/components/schemas/lever-hiring" + - title: linkedin-ads + $ref: "#/components/schemas/linkedin-ads" + - title: mailchimp + $ref: "#/components/schemas/mailchimp" + - title: microsoft-onedrive + $ref: "#/components/schemas/microsoft-onedrive" + - title: microsoft-sharepoint + $ref: "#/components/schemas/microsoft-sharepoint" + - title: microsoft-teams + $ref: "#/components/schemas/microsoft-teams" + - title: monday + $ref: "#/components/schemas/monday" + - title: notion + $ref: "#/components/schemas/notion" + - title: pinterest + $ref: "#/components/schemas/pinterest" + - title: retently + $ref: "#/components/schemas/retently" + - title: salesforce + $ref: "#/components/schemas/salesforce" + - title: shopify + $ref: "#/components/schemas/shopify" + - 
title: slack + $ref: "#/components/schemas/slack" + - title: smartsheets + $ref: "#/components/schemas/smartsheets" + - title: snapchat-marketing + $ref: "#/components/schemas/snapchat-marketing" + - title: snowflake + $ref: "#/components/schemas/snowflake" + - title: square + $ref: "#/components/schemas/square" + - title: surveymonkey + $ref: "#/components/schemas/surveymonkey" + - title: tiktok-marketing + $ref: "#/components/schemas/tiktok-marketing" + - title: trello + $ref: "#/components/schemas/trello" + - title: typeform + $ref: "#/components/schemas/typeform" + - title: youtube-analytics + $ref: "#/components/schemas/youtube-analytics" + - title: zendesk-chat + $ref: "#/components/schemas/zendesk-chat" + - title: zendesk-sunshine + $ref: "#/components/schemas/zendesk-sunshine" + - title: zendesk-support + $ref: "#/components/schemas/zendesk-support" + - title: zendesk-talk + $ref: "#/components/schemas/zendesk-talk" + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. + example: { user: "charles" } + InitiateOauthRequest: + title: Root Type for initiate-oauth-post-body + description: POST body for initiating OAuth via the public API + required: + - redirectUrl + - workspaceId + - sourceType + type: object + example: + redirectUrl: "https://cloud.airbyte.io/v1/api/oauth/callback" + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + destinationId: 3d93b16c-ff5f-421c-8908-5a3c82088f14 + properties: + redirectUrl: + description: >- + The URL to redirect the user to with the OAuth secret stored in the secret_id query + string parameter after authentication is complete. + type: string + workspaceId: + format: uuid + description: The workspace to create the secret and eventually the full source. 
+ type: string + oAuthInputConfiguration: + $ref: "#/components/schemas/OAuthInputConfiguration" + description: Input configuration for OAuth required by some sources. + sourceType: + $ref: "#/components/schemas/OAuthActorNames" + WorkspaceOAuthCredentialsRequest: + title: "Root Type for WorkspaceOAuthCredentials" + description: "POST body for creating/updating workspace level OAuth credentials" + required: + - "actorType" + - "name" + - "configuration" + type: "object" + properties: + actorType: + $ref: "#/components/schemas/ActorTypeEnum" + name: + $ref: "#/components/schemas/OAuthActorNames" + configuration: + $ref: "#/components/schemas/OAuthCredentialsConfiguration" + OAuthActorNames: + enum: + - airtable + - amazon-ads + - amazon-seller-partner + - asana + - azure-blob-storage + - bing-ads + - facebook-marketing + - github + - gitlab + - google-ads + - google-analytics-data-api + - google-drive + - google-search-console + - google-sheets + - hubspot + - instagram + - intercom + - lever-hiring + - linkedin-ads + - mailchimp + - microsoft-onedrive + - microsoft-sharepoint + - microsoft-teams + - monday + - notion + - pinterest + - retently + - salesforce + - slack + - smartsheets + - snapchat-marketing + - snowflake + - square + - surveymonkey + - tiktok-marketing + - trello + - typeform + - youtube-analytics + - zendesk-chat + - zendesk-sunshine + - zendesk-support + - zendesk-talk + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT +security: + - bearerAuth: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_sdk.yaml b/airbyte-api/server-api/src/main/openapi/api_sdk.yaml new file mode 100644 index 00000000000..28eee392c9b --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_sdk.yaml @@ -0,0 +1,65046 @@ +--- +openapi: "3.1.0" +info: + title: "airbyte-api" + version: "1.0.0" + description: "Programatically control Airbyte Cloud, OSS & Enterprise." 
+servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /health: + get: + tags: + - "public_health" + - "public" + responses: + "200": + description: "Successful operation" + operationId: "getHealthCheck" + summary: "Health Check" + security: [] + x-speakeasy-alias: "getHealthCheck" + x-speakeasy-group: "Health" + /jobs: + get: + tags: + - "public_jobs" + - "public" + - "Jobs" + parameters: + - name: "connectionId" + description: "Filter the Jobs by connectionId." + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of Jobs returned. The default is\ + \ 20 Jobs." + schema: + format: "int32" + default: 20 + maximum: 100 + minimum: 1 + type: "integer" + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning Jobs. The default\ + \ is 0." + schema: + format: "int32" + default: 0 + minimum: 0 + type: "integer" + in: "query" + - name: "jobType" + description: "Filter the Jobs by jobType." + schema: + $ref: "#/components/schemas/JobTypeEnum" + in: "query" + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list jobs for. Empty\ + \ list will retrieve all allowed workspaces." 
+ schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "status" + description: "The Job status you want to filter by" + schema: + $ref: "#/components/schemas/JobStatusEnum" + in: "query" + required: false + - name: "createdAtStart" + description: "The start date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "createdAtEnd" + description: "The end date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "updatedAtStart" + description: "The start date to filter by" + schema: + type: "string" + format: "date-time" + example: 1687450500000 + in: "query" + required: false + - name: "updatedAtEnd" + description: "The end date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "orderBy" + description: "The field and method to use for ordering" + schema: + type: "string" + pattern: "\\w+|(ASC|DESC)" + in: "query" + required: false + example: "updatedAt|DESC" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobsResponse" + examples: + Job List Response Example: + value: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + description: "List all the Jobs by connectionId." 
+ "403": + description: "Not allowed" + operationId: "listJobs" + summary: "List Jobs by sync type" + x-speakeasy-alias: "listJobs" + x-speakeasy-group: "Jobs" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/JobCreateRequest" + examples: + Job Creation Request Example: + value: + connectionId: "e735894a-e773-4938-969f-45f53957b75b" + jobType: "sync" + required: true + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Creation Response Example: + value: + jobId: 1234 + status: "running" + jobType: "sync" + description: + "Kicks off a new Job based on the JobType. The connectionId\ + \ is the resource that Job will be run for." + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createJob" + summary: "Trigger a sync or reset job of a connection" + x-speakeasy-alias: "createJob" + x-speakeasy-group: "Jobs" + /jobs/{jobId}: + get: + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Get Response Example: + value: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + description: "Get a Job by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getJob" + x-speakeasy-alias: "getJob" + x-speakeasy-group: "Jobs" + summary: "Get Job status and details" + delete: + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + description: "Cancel a Job." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "cancelJob" + x-speakeasy-alias: "cancelJob" + x-speakeasy-group: "Jobs" + summary: "Cancel a running Job" + parameters: + - name: "jobId" + schema: + format: "int64" + type: "integer" + in: "path" + required: true + /sources: + get: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourcesResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listSources" + x-speakeasy-alias: "listSources" + x-speakeasy-group: "Sources" + summary: "List sources" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list sources for. Empty\ + \ list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + example: "df08f6b0-b364-4cc1-9b3f-96f5d2fccfb2,b0796797-de23-4fc7-a5e2-7e131314718c" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted sources in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of sources returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning sources. 
The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCreateRequest" + examples: + Source Creation Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Creation Response Example: + value: + sourceId: "0c31738c-0b2d-4887-b506-e2cd1c39cc35" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSource" + x-speakeasy-alias: "createSource" + x-speakeasy-group: "Sources" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + /sources/{sourceId}: + get: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Get Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSource" + x-speakeasy-alias: "getSource" + x-speakeasy-group: "Sources" + summary: "Get Source details" + patch: + tags: + - "public_sources" + - "public" + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePatchRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a Source" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchSource" + x-speakeasy-alias: "patchSource" + x-speakeasy-group: "Sources" + summary: "Update a Source" + put: + tags: + - "public_sources" + - "public" + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePutRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a 
source and fully overwrite it" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSource" + x-speakeasy-alias: "putSource" + x-speakeasy-group: "Sources" + summary: "Update a Source and fully overwrite it" + x-speakeasy-entity-operation: "Source#update" + delete: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSource" + x-speakeasy-alias: "deleteSource" + x-speakeasy-group: "Sources" + summary: "Delete a Source" + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations: + get: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationsResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listDestinations" + x-speakeasy-alias: "listDestinations" + x-speakeasy-group: "Destinations" + summary: "List destinations" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list destinations for.\ + \ Empty list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted destinations in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of destinations returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning destinations. 
The\ + \ default is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationCreateRequest" + examples: + Destination Creation Request Example: + value: + name: "Postgres" + workspaceId: "2155ae5a-de39-4808-af6a-16fe7b8b4ed2" + configuration: + airbyte_destination_name: "postgres" + port: 5432 + schema: "public" + ssl_mode: + mode: "prefer" + tunnel_method: + tunnel_method: "NO_TUNNEL" + host: "localhost" + database: "postgres" + username: "postgres" + password: "test" + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Creation Response Example: + value: + destinationId: "af0c3c67-aa61-419f-8922-95b0bf840e86" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "createDestination" + x-speakeasy-alias: "createDestination" + x-speakeasy-group: "Destinations" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob\ + \ containing the configuration for the source." + /destinations/{destinationId}: + get: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Get Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "My Destination" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestination" + x-speakeasy-alias: "getDestination" + x-speakeasy-group: "Destinations" + summary: "Get Destination details" + delete: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestination" + x-speakeasy-alias: "deleteDestination" + x-speakeasy-group: "Destinations" + summary: "Delete a Destination" + patch: + tags: + - "public_destinations" + - "public" + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPatchRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Destination" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Update a Destination" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchDestination" + x-speakeasy-alias: "patchDestination" + x-speakeasy-group: "Destinations" + summary: "Update a Destination" + put: + tags: + - "public_destinations" + - "public" + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPutRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + 
customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Destination" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Update a Destination and fully overwrite it" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestination" + x-speakeasy-alias: "putDestination" + x-speakeasy-group: "Destinations" + summary: "Update a Destination and fully overwrite it" + x-speakeasy-entity-operation: "Destination#update" + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources/initiateOAuth: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/InitiateOauthRequest" + required: true + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + $ref: "#/components/responses/InitiateOauthResponse" + "400": + description: "A field in the body has not been set appropriately." + "403": + description: "API key is invalid." + operationId: "initiateOAuth" + x-speakeasy-alias: "initiateOAuth" + x-speakeasy-group: "Sources" + summary: "Initiate OAuth for a source" + description: + "Given a source ID, workspace ID, and redirect URL, initiates OAuth\ + \ for the source.\n\nThis returns a fully formed URL for performing user authentication\ + \ against the relevant source identity provider (IdP). 
Once authentication\ + \ has been completed, the IdP will redirect to an Airbyte endpoint which will\ + \ save the access and refresh tokens off as a secret and return the secret\ + \ ID to the redirect URL specified in the `secret_id` query string parameter.\n\ + \nThat secret ID can be used to create a source with credentials in place\ + \ of actual tokens." + /connections: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionCreateRequest" + examples: + Connection Creation Request Example: + value: + sourceId: "95e66a59-8045-4307-9678-63bc3c9b8c93" + destinationId: "e478de0d-a3a0-475c-b019-25f7dd29e281" + name: "Postgres-to-Bigquery" + required: true + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Creation Response Example: + value: + connectionId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createConnection" + x-speakeasy-alias: "createConnection" + x-speakeasy-group: "Connections" + summary: "Create a connection" + x-speakeasy-entity-operation: "Connection#create" + get: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionsResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listConnections" + x-speakeasy-alias: "listConnections" + x-speakeasy-group: "Connections" + summary: "List connections" + x-speakeasy-entity-operation: "Connection#list" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list connections for.\ + \ Empty list will retrieve all allowed workspaces." 
+ schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted connections in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of Connections returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning Connections. The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + /connections/{connectionId}: + get: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Postgres To Snowflake" + sourceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + destinationId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Get a Connection by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getConnection" + x-speakeasy-alias: "getConnection" + x-speakeasy-group: "Connections" + summary: "Get Connection details" + x-speakeasy-entity-operation: "Connection#read" + patch: + tags: + - "public_connections" + - "public" + - "Connections" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionPatchRequest" + examples: + Connection Update Request Example: + value: + sourceId: "95e66a59-8045-4307-9678-63bc3c9b8c93" + destinationId: "e478de0d-a3a0-475c-b019-25f7dd29e281" + name: "Postgres-to-Bigquery" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Postgres To Snowflake" + sourceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + destinationId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a Connection by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchConnection" + x-speakeasy-alias: "patchConnection" + x-speakeasy-group: "Connections" + summary: "Update Connection details" + x-speakeasy-entity-operation: "Connection#update" + delete: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteConnection" + x-speakeasy-alias: "deleteConnection" + x-speakeasy-group: "Connections" + summary: "Delete a Connection" + x-speakeasy-entity-operation: "Connection#delete" + parameters: + - name: "connectionId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /streams: + get: + tags: + - "public_streams" + - "public" + - "Streams" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/StreamPropertiesResponse" + description: + "Get the available streams properties for a source/destination\ + \ pair." 
+ "400": + description: "Required parameters are missing" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getStreamProperties" + x-speakeasy-alias: "getStreamProperties" + x-speakeasy-group: "Streams" + summary: "Get stream properties" + parameters: + - name: "sourceId" + description: "ID of the source" + schema: + format: "UUID" + type: "string" + in: "query" + required: true + - name: "destinationId" + description: "ID of the destination" + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "ignoreCache" + description: + "If true pull the latest schema from the source, else pull from\ + \ cache (default false)" + schema: + type: "boolean" + default: false + in: "query" + required: false + /workspaces: + get: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspacesResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listWorkspaces" + x-speakeasy-alias: "listWorkspaces" + x-speakeasy-group: "Workspaces" + summary: "List workspaces" + x-speakeasy-entity-operation: "Workspace#list" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to fetch. Empty list will\ + \ retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted workspaces in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of workspaces returned. The default\ + \ is 20." 
+ schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning workspaces. The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceCreateRequest" + examples: + Workspace Creation Request Example: + value: + name: "Company Workspace Name" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Creation Response Example: + value: + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createWorkspace" + x-speakeasy-alias: "createWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Create a workspace" + x-speakeasy-entity-operation: "Workspace#create" + /workspaces/{workspaceId}: + parameters: + - name: "workspaceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + get: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + description: "Get a Workspace by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getWorkspace" + x-speakeasy-alias: "getWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Get Workspace details" + x-speakeasy-entity-operation: "Workspace#read" + patch: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceUpdateRequest" + examples: + Workspace Update Request Example: + value: + name: "Company Workspace Name" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Update Response Example: + value: + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "updateWorkspace" + x-speakeasy-alias: "updateWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Update a workspace" + x-speakeasy-entity-operation: "Workspace#update" + delete: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteWorkspace" + x-speakeasy-alias: "deleteWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Delete a Workspace" + x-speakeasy-entity-operation: "Workspace#delete" + /workspaces/{workspaceId}/oauthCredentials: + put: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceOAuthCredentialsRequest" + required: true + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + description: "OAuth credential override was successful." + "400": + description: "A field in the body has not been set appropriately." + "403": + description: "API key is invalid." 
+ operationId: "createOrUpdateWorkspaceOAuthCredentials" + x-speakeasy-alias: "createOrUpdateWorkspaceOAuthCredentials" + x-speakeasy-group: "Workspaces" + summary: "Create OAuth override credentials for a workspace and source type." + description: + "Create/update a set of OAuth credentials to override the Airbyte-provided\ + \ OAuth credentials used for source/destination OAuth.\nIn order to determine\ + \ what the credential configuration needs to be, please see the connector\ + \ specification of the relevant source/destination." + parameters: + - name: "workspaceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /permissions/{permissionId}: + parameters: + - name: "permissionId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + get: + tags: + - "public_permissions" + - "public" + - "Permissions" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: "Get a Permission by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "getPermission" + summary: "Get Permission details" + x-speakeasy-alias: "getPermission" + x-speakeasy-group: "Permissions" + x-speakeasy-entity-operation: "Permission#read" + patch: + tags: + - "public_permissions" + - "public" + - "Permissions" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionUpdateRequest" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: "Successful updated" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "updatePermission" + summary: "Update a permission" + x-speakeasy-alias: "updatePermission" + x-speakeasy-group: "Permissions" + x-speakeasy-entity-operation: "Permission#update" + delete: + tags: + - "public_permissions" + - "public" + - "Permissions" + responses: + "204": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "deletePermission" + x-speakeasy-alias: "deletePermission" + x-speakeasy-group: "Permissions" + summary: "Delete a Permission" + x-speakeasy-entity-operation: "Permission#delete" + /permissions: + get: + tags: + - "public_permissions" + - "public" + - "Permissions" + parameters: + - name: "userId" + description: "User Id in permission." + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "organizationId" + description: + "This is required if you want to read someone else's permissions,\ + \ and you should have organization admin or a higher role." 
+ schema: + format: "UUID" + type: "string" + in: "query" + required: false + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionsResponse" + description: "List Permissions." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listPermissions" + x-speakeasy-alias: "listPermissions" + x-speakeasy-group: "Permissions" + summary: "List Permissions by user id" + post: + tags: + - "public_permissions" + - "public" + - "Permissions" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionCreateRequest" + examples: + Permission Creation Request Example: + value: + permissionType: "workspace_admin" + userId: "7d08fd6c-531e-4a00-937e-3d355f253e63" + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + examples: + Permission Creation Response Example: + value: + permissionId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + permissionType: "workspace_admin" + userId: "7d08fd6c-531e-4a00-937e-3d355f253e63" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createPermission" + x-speakeasy-alias: "createPermission" + x-speakeasy-group: "Permissions" + summary: "Create a permission" + x-speakeasy-entity-operation: "Permission#create" + /organizations: + get: + tags: + - "public_organizations" + - "public" + - "Organizations" + summary: "List all organizations for a user" + description: "Lists users organizations." + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationsResponse" + description: "List user's organizations." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listOrganizationsForUser" + x-speakeasy-alias: "listOrganizationsForUser" + x-speakeasy-group: "Organizations" + /users: + get: + tags: + - "public_users" + - "public" + - "Users" + summary: "List all users within an organization" + description: + "Organization Admin user can list all users within the same organization.\ + \ Also provide filtering on a list of user IDs or/and a list of user emails." + parameters: + - in: "query" + name: "organizationId" + schema: + type: "string" + format: "UUID" + required: true + - in: "query" + name: "ids" + schema: + type: "array" + items: + type: "string" + format: "UUID" + description: "List of user IDs to filter by" + - in: "query" + name: "emails" + schema: + type: "array" + items: + type: "string" + format: "email" + description: "List of user emails to filter by" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/UsersResponse" + description: "List Users." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listUsersWithinAnOrganization" + x-speakeasy-alias: "listUsersWithinAnOrganization" + x-speakeasy-group: "Users" +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." 
+ x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Source" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: "Connection" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: "Destination" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: "Workspace" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: "User" + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single 
permission." + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: "Permission" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + source-trello: + type: "object" + required: + - "key" + - "token" + - "start_date" + - "sourceType" + properties: + key: + type: "string" + title: "API key" + description: + "Trello API key. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + token: + type: "string" + title: "API token" + description: + "Trello API token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 2 + board_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9a-fA-F]{24}$" + title: "Trello Board IDs" + description: + "IDs of the boards to replicate data from. If left empty, data\ + \ from all boards to which you have access will be replicated. Please\ + \ note that this is not the 8-character ID in the board's shortLink (URL\ + \ of the board). Rather, what is required here is the 24-character ID\ + \ usually returned by the API" + order: 3 + sourceType: + title: "trello" + const: "trello" + enum: + - "trello" + order: 0 + type: "string" + source-trello-update: + type: "object" + required: + - "key" + - "token" + - "start_date" + properties: + key: + type: "string" + title: "API key" + description: + "Trello API key. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 0 + token: + type: "string" + title: "API token" + description: + "Trello API token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 2 + board_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9a-fA-F]{24}$" + title: "Trello Board IDs" + description: + "IDs of the boards to replicate data from. If left empty, data\ + \ from all boards to which you have access will be replicated. Please\ + \ note that this is not the 8-character ID in the board's shortLink (URL\ + \ of the board). Rather, what is required here is the 24-character ID\ + \ usually returned by the API" + order: 3 + source-the-guardian-api: + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + description: + "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: + "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: + "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." 
+ examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: + "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: + "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + sourceType: + title: "the-guardian-api" + const: "the-guardian-api" + enum: + - "the-guardian-api" + order: 0 + type: "string" + source-the-guardian-api-update: + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + description: + "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: + "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: + "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. 
Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." + examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: + "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: + "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + source-harvest: + title: "Harvest Spec" + type: "object" + required: + - "account_id" + - "replication_start_date" + - "sourceType" + properties: + account_id: + title: "Account ID" + description: + "Harvest account ID. Required for all Harvest requests in pair\ + \ with Personal Access Token" + airbyte_secret: true + type: "string" + order: 0 + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + order: 1 + format: "date-time" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." 
+ type: "object" + order: 3 + oneOf: + - type: "object" + title: "Authenticate via Harvest (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_token: + title: "Personal Access Token" + description: + "Log into Harvest and then create new personal access token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "harvest" + const: "harvest" + enum: + - "harvest" + order: 0 + type: "string" + source-harvest-update: + title: "Harvest Spec" + type: "object" + required: + - "account_id" + - "replication_start_date" + properties: + account_id: + title: "Account ID" + description: + "Harvest account ID. Required for all Harvest requests in pair\ + \ with Personal Access Token" + airbyte_secret: true + type: "string" + order: 0 + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + order: 1 + format: "date-time" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." + type: "object" + order: 3 + oneOf: + - type: "object" + title: "Authenticate via Harvest (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_token: + title: "Personal Access Token" + description: + "Log into Harvest and then create new personal access token." 
+ type: "string" + airbyte_secret: true + source-yotpo: + type: "object" + required: + - "access_token" + - "app_key" + - "start_date" + - "email" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\ + \ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)" + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + app_key: + type: "string" + description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)" + title: "App Key" + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." + title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + email: + type: "string" + description: "Email address registered with yotpo." + title: "Registered email address" + default: "example@gmail.com" + order: 3 + sourceType: + title: "yotpo" + const: "yotpo" + enum: + - "yotpo" + order: 0 + type: "string" + source-yotpo-update: + type: "object" + required: + - "access_token" + - "app_key" + - "start_date" + - "email" + properties: + access_token: + type: "string" + description: + "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\ + \ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)" + title: "Access Token" + airbyte_secret: true + order: 0 + app_key: + type: "string" + description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)" + title: "App Key" + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." 
+ title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + email: + type: "string" + description: "Email address registered with yotpo." + title: "Registered email address" + default: "example@gmail.com" + order: 3 + source-prestashop: + title: "PrestaShop Spec" + type: "object" + required: + - "access_key" + - "url" + - "start_date" + - "sourceType" + properties: + access_key: + type: "string" + title: "Access Key" + description: + "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + format: "date" + order: 2 + sourceType: + title: "prestashop" + const: "prestashop" + enum: + - "prestashop" + order: 0 + type: "string" + source-prestashop-update: + title: "PrestaShop Spec" + type: "object" + required: + - "access_key" + - "url" + - "start_date" + properties: + access_key: + type: "string" + title: "Access Key" + description: + "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 + airbyte_secret: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + format: "date" + order: 2 + source-netsuite: + title: "Netsuite Spec" + type: "object" + required: + - "realm" + - "consumer_key" + - "consumer_secret" + - "token_key" + - "token_secret" + - "start_datetime" + - "sourceType" + properties: + realm: + type: "string" + title: "Realm (Account Id)" + description: + "Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1,\ + \ as for the `sandbox`" + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + consumer_key: + type: "string" + title: "Consumer Key" + description: "Consumer key associated with your integration" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + consumer_secret: + type: "string" + title: "Consumer Secret" + description: "Consumer secret associated with your integration" + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_key: + type: "string" + title: "Token Key (Token Id)" + description: "Access token key" + order: 3 + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_secret: + type: "string" + title: "Token Secret" + description: "Access token secret" + order: 4 + airbyte_secret: true + x-speakeasy-param-sensitive: true + object_types: + type: "array" + title: "Object Types" + items: + type: "string" + description: + "The API names of the Netsuite objects you want to sync. Setting\ + \ this speeds up the connection setup process by limiting the number of\ + \ schemas that need to be retrieved from Netsuite." 
+ order: 5 + examples: + - "customer" + - "salesorder" + - "etc" + default: [] + start_datetime: + type: "string" + title: "Start Date" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DDTHH:mm:ssZ\"" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + window_in_days: + type: "integer" + title: "Window in Days" + description: + "The amount of days used to query the data with date chunks.\ + \ Set smaller value, if you have lots of data." + order: 7 + default: 30 + sourceType: + title: "netsuite" + const: "netsuite" + enum: + - "netsuite" + order: 0 + type: "string" + source-netsuite-update: + title: "Netsuite Spec" + type: "object" + required: + - "realm" + - "consumer_key" + - "consumer_secret" + - "token_key" + - "token_secret" + - "start_datetime" + properties: + realm: + type: "string" + title: "Realm (Account Id)" + description: + "Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1,\ + \ as for the `sandbox`" + order: 0 + airbyte_secret: true + consumer_key: + type: "string" + title: "Consumer Key" + description: "Consumer key associated with your integration" + order: 1 + airbyte_secret: true + consumer_secret: + type: "string" + title: "Consumer Secret" + description: "Consumer secret associated with your integration" + order: 2 + airbyte_secret: true + token_key: + type: "string" + title: "Token Key (Token Id)" + description: "Access token key" + order: 3 + airbyte_secret: true + token_secret: + type: "string" + title: "Token Secret" + description: "Access token secret" + order: 4 + airbyte_secret: true + object_types: + type: "array" + title: "Object Types" + items: + type: "string" + description: + "The API names of the Netsuite objects you want to sync. Setting\ + \ this speeds up the connection setup process by limiting the number of\ + \ schemas that need to be retrieved from Netsuite." 
+ order: 5 + examples: + - "customer" + - "salesorder" + - "etc" + default: [] + start_datetime: + type: "string" + title: "Start Date" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DDTHH:mm:ssZ\"" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + window_in_days: + type: "integer" + title: "Window in Days" + description: + "The amount of days used to query the data with date chunks.\ + \ Set smaller value, if you have lots of data." + order: 7 + default: 30 + source-convex: + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + - "sourceType" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "convex" + const: "convex" + enum: + - "convex" + order: 0 + type: "string" + source-convex-update: + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." + airbyte_secret: true + source-recurly: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recurly API Key. See the docs for more information on how to generate this key." 
+ order: 0 + x-speakeasy-param-sensitive: true + begin_time: + type: "string" + description: + "ISO8601 timestamp from which the replication from Recurly\ + \ API will start from." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 1 + end_time: + type: "string" + description: + "ISO8601 timestamp to which the replication from Recurly API\ + \ will stop. Records after that date won't be imported." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 2 + sourceType: + title: "recurly" + const: "recurly" + enum: + - "recurly" + order: 0 + type: "string" + source-recurly-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recurly API Key. See the docs for more information on how to generate this key." + order: 0 + begin_time: + type: "string" + description: + "ISO8601 timestamp from which the replication from Recurly\ + \ API will start from." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 1 + end_time: + type: "string" + description: + "ISO8601 timestamp to which the replication from Recurly API\ + \ will stop. Records after that date won't be imported." 
+ examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 2 + source-pennylane: + type: "object" + required: + - "start_time" + - "api_key" + - "sourceType" + properties: + start_time: + type: "string" + order: 0 + title: "Start time" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "pennylane" + const: "pennylane" + enum: + - "pennylane" + order: 0 + type: "string" + source-pennylane-update: + type: "object" + required: + - "start_time" + - "api_key" + properties: + start_time: + type: "string" + order: 0 + title: "Start time" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + source-teamwork: + type: "object" + required: + - "username" + - "site_name" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + site_name: + type: "string" + description: "The teamwork site name appearing at the url" + order: 2 + title: "Site Name" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "teamwork" + const: "teamwork" + enum: + - "teamwork" + order: 0 + type: "string" + source-teamwork-update: + type: "object" + required: + - "username" + - "site_name" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + site_name: + type: "string" + description: "The teamwork 
site name appearing at the url" + order: 2 + title: "Site Name" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-zendesk-chat: + title: "Zendesk Chat Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Chat API, in the format YYYY-MM-DDT00:00:00Z." + examples: + - "2021-02-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + subdomain: + type: "string" + title: "Subdomain" + description: + "Required if you access Zendesk Chat from a Zendesk Support\ + \ subdomain." + default: "" + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "credentials" + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access Token" + required: + - "credentials" + - "access_token" + properties: + credentials: + type: "string" + const: "access_token" + order: 0 + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: "The Access Token to make authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zendesk-chat" + const: "zendesk-chat" + enum: + - "zendesk-chat" + order: 0 + type: "string" + source-zendesk-chat-update: + title: "Zendesk Chat Spec" + type: "object" + required: + - "start_date" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Chat API, in the format YYYY-MM-DDT00:00:00Z." + examples: + - "2021-02-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + subdomain: + type: "string" + title: "Subdomain" + description: + "Required if you access Zendesk Chat from a Zendesk Support\ + \ subdomain." + default: "" + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "credentials" + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." 
+ airbyte_secret: true + - type: "object" + title: "Access Token" + required: + - "credentials" + - "access_token" + properties: + credentials: + type: "string" + const: "access_token" + order: 0 + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: "The Access Token to make authenticated requests." + airbyte_secret: true + source-when-i-work: + type: "object" + required: + - "email" + - "password" + - "sourceType" + properties: + email: + type: "string" + description: "Email of your when-i-work account" + title: "Email" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + password: + type: "string" + description: "Password for your when-i-work account" + title: "Password" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "when-i-work" + const: "when-i-work" + enum: + - "when-i-work" + order: 0 + type: "string" + source-when-i-work-update: + type: "object" + required: + - "email" + - "password" + properties: + email: + type: "string" + description: "Email of your when-i-work account" + title: "Email" + airbyte_secret: true + order: 0 + password: + type: "string" + description: "Password for your when-i-work account" + title: "Password" + airbyte_secret: true + order: 1 + source-my-hours: + title: "My Hours Spec" + type: "object" + required: + - "email" + - "password" + - "start_date" + - "sourceType" + properties: + email: + title: "Email" + type: "string" + description: "Your My Hours username" + example: "john@doe.com" + password: + title: "Password" + type: "string" + description: "The password associated to the username" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: "Start date for collecting time logs" + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: "Time logs batch size" + description: "Pagination size used 
for retrieving logs in days" + examples: + - 30 + type: "integer" + minimum: 1 + maximum: 365 + default: 30 + sourceType: + title: "my-hours" + const: "my-hours" + enum: + - "my-hours" + order: 0 + type: "string" + source-my-hours-update: + title: "My Hours Spec" + type: "object" + required: + - "email" + - "password" + - "start_date" + properties: + email: + title: "Email" + type: "string" + description: "Your My Hours username" + example: "john@doe.com" + password: + title: "Password" + type: "string" + description: "The password associated to the username" + airbyte_secret: true + start_date: + title: "Start Date" + description: "Start date for collecting time logs" + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: "Time logs batch size" + description: "Pagination size used for retrieving logs in days" + examples: + - 30 + type: "integer" + minimum: 1 + maximum: 365 + default: 30 + source-7shifts: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token to use for authentication. Generate it in the\ + \ 7shifts Developer Tools." + name: "access_token" + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "7shifts" + const: "7shifts" + enum: + - "7shifts" + order: 0 + type: "string" + source-7shifts-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Access token to use for authentication. Generate it in the\ + \ 7shifts Developer Tools." 
+ name: "access_token" + title: "Access Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-eventbrite: + type: "object" + required: + - "private_token" + - "start_date" + - "sourceType" + properties: + private_token: + type: "string" + description: "The private token to use for authenticating API requests." + name: "private_token" + order: 0 + title: "Private Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "eventbrite" + const: "eventbrite" + enum: + - "eventbrite" + order: 0 + type: "string" + source-eventbrite-update: + type: "object" + required: + - "private_token" + - "start_date" + properties: + private_token: + type: "string" + description: "The private token to use for authenticating API requests." + name: "private_token" + order: 0 + title: "Private Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-klaviyo: + title: "Klaviyo Spec" + type: "object" + properties: + api_key: + type: "string" + title: "Api Key" + description: + "Klaviyo API Key. See our docs if you need help finding this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This field is optional\ + \ - if not provided, all data will be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 1 + disable_fetching_predictive_analytics: + type: "boolean" + title: "Disable Fetching Predictive Analytics" + description: + "Certain streams like the profiles stream can retrieve predictive\ + \ analytics data from Klaviyo's API. However, at high volume, this can\ + \ lead to service availability issues on the API which can be improved\ + \ by not fetching this field. WARNING: Enabling this setting will stop\ + \ the \"predictive_analytics\" column from being populated in your downstream\ + \ destination." + order: 2 + sourceType: + title: "klaviyo" + const: "klaviyo" + enum: + - "klaviyo" + order: 0 + type: "string" + required: + - "api_key" + - "sourceType" + source-klaviyo-update: + title: "Klaviyo Spec" + type: "object" + properties: + api_key: + type: "string" + title: "Api Key" + description: + "Klaviyo API Key. See our docs if you need help finding this key." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This field is optional\ + \ - if not provided, all data will be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 1 + disable_fetching_predictive_analytics: + type: "boolean" + title: "Disable Fetching Predictive Analytics" + description: + "Certain streams like the profiles stream can retrieve predictive\ + \ analytics data from Klaviyo's API. However, at high volume, this can\ + \ lead to service availability issues on the API which can be improved\ + \ by not fetching this field. WARNING: Enabling this setting will stop\ + \ the \"predictive_analytics\" column from being populated in your downstream\ + \ destination." 
+ order: 2 + required: + - "api_key" + source-datadog: + type: "object" + required: + - "api_key" + - "application_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Datadog API key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + application_key: + type: "string" + description: "Datadog application key" + order: 1 + title: "Application Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + description: + "The search query. This just applies to Incremental syncs.\ + \ If empty, it'll collect all logs." + order: 2 + title: "Query" + start_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This just applies to Incremental\ + \ syncs." + order: 3 + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2023-12-01T00:00:00Z" + site: + type: "string" + description: "The site where Datadog data resides in." + enum: + - "datadoghq.com" + - "us3.datadoghq.com" + - "us5.datadoghq.com" + - "datadoghq.eu" + - "ddog-gov.com" + order: 4 + title: "Site" + default: "datadoghq.com" + end_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ + \ after this date will not be replicated. An empty value will represent\ + \ the current datetime for each execution. This just applies to Incremental\ + \ syncs." + order: 5 + title: "End date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2024-01-01T00:00:00Z" + max_records_per_request: + type: "integer" + description: "Maximum number of records to collect per request." 
+ order: 6 + title: "Max records per requests" + default: 5000 + maximum: 5000 + minimum: 1 + queries: + type: "array" + description: "List of queries to be run and used as inputs." + items: + type: "object" + required: + - "name" + - "data_source" + - "query" + properties: + name: + type: "string" + description: "The variable name for use in queries." + order: 1 + title: "Query Name" + query: + type: "string" + description: "A classic query string." + order: 3 + title: "Query" + data_source: + type: "string" + description: "A data source that is powered by the platform." + enum: + - "metrics" + - "cloud_cost" + - "logs" + - "rum" + order: 2 + title: "Data Source" + order: 7 + title: "Queries" + default: [] + sourceType: + title: "datadog" + const: "datadog" + enum: + - "datadog" + order: 0 + type: "string" + source-datadog-update: + type: "object" + required: + - "api_key" + - "application_key" + properties: + api_key: + type: "string" + description: "Datadog API key" + order: 0 + title: "API Key" + airbyte_secret: true + application_key: + type: "string" + description: "Datadog application key" + order: 1 + title: "Application Key" + airbyte_secret: true + query: + type: "string" + description: + "The search query. This just applies to Incremental syncs.\ + \ If empty, it'll collect all logs." + order: 2 + title: "Query" + start_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This just applies to Incremental\ + \ syncs." + order: 3 + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2023-12-01T00:00:00Z" + site: + type: "string" + description: "The site where Datadog data resides in." 
+ enum: + - "datadoghq.com" + - "us3.datadoghq.com" + - "us5.datadoghq.com" + - "datadoghq.eu" + - "ddog-gov.com" + order: 4 + title: "Site" + default: "datadoghq.com" + end_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ + \ after this date will not be replicated. An empty value will represent\ + \ the current datetime for each execution. This just applies to Incremental\ + \ syncs." + order: 5 + title: "End date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2024-01-01T00:00:00Z" + max_records_per_request: + type: "integer" + description: "Maximum number of records to collect per request." + order: 6 + title: "Max records per requests" + default: 5000 + maximum: 5000 + minimum: 1 + queries: + type: "array" + description: "List of queries to be run and used as inputs." + items: + type: "object" + required: + - "name" + - "data_source" + - "query" + properties: + name: + type: "string" + description: "The variable name for use in queries." + order: 1 + title: "Query Name" + query: + type: "string" + description: "A classic query string." + order: 3 + title: "Query" + data_source: + type: "string" + description: "A data source that is powered by the platform." + enum: + - "metrics" + - "cloud_cost" + - "logs" + - "rum" + order: 2 + title: "Data Source" + order: 7 + title: "Queries" + default: [] + source-luma: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Get your API key on lu.ma Calendars dashboard → Settings." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "luma" + const: "luma" + enum: + - "luma" + order: 0 + type: "string" + source-luma-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "Get your API key on lu.ma Calendars dashboard → Settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-dockerhub: + type: "object" + required: + - "docker_username" + - "sourceType" + properties: + docker_username: + type: "string" + order: 0 + title: "Docker Username" + description: + "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + sourceType: + title: "dockerhub" + const: "dockerhub" + enum: + - "dockerhub" + order: 0 + type: "string" + source-dockerhub-update: + type: "object" + required: + - "docker_username" + properties: + docker_username: + type: "string" + order: 0 + title: "Docker Username" + description: + "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + source-webflow: + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + - "sourceType" + properties: + site_id: + title: "Site id" + type: "string" + description: + "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. 
See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + accept_version: + title: "Accept Version" + type: "string" + description: "The version of the Webflow API to use. See https://developers.webflow.com/#versioning" + example: "1.0.0" + order: 2 + sourceType: + title: "webflow" + const: "webflow" + enum: + - "webflow" + order: 0 + type: "string" + source-webflow-update: + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + properties: + site_id: + title: "Site id" + type: "string" + description: + "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + accept_version: + title: "Accept Version" + type: "string" + description: "The version of the Webflow API to use. 
See https://developers.webflow.com/#versioning" + example: "1.0.0" + order: 2 + source-scryfall: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "scryfall" + const: "scryfall" + enum: + - "scryfall" + order: 0 + type: "string" + source-scryfall-update: + type: "object" + required: [] + properties: {} + source-beamer: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "beamer" + const: "beamer" + enum: + - "beamer" + order: 0 + type: "string" + source-beamer-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-high-level: + type: "object" + required: + - "location_id" + - "api_key" + - "start_date" + - "sourceType" + properties: + location_id: + type: "string" + order: 0 + title: "Location ID" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "high-level" + const: "high-level" + enum: + - "high-level" + order: 0 + type: "string" + source-high-level-update: + type: "object" + required: + - "location_id" + - "api_key" + - "start_date" + properties: + location_id: + type: "string" + order: 0 + title: "Location ID" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true 
+ start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-wikipedia-pageviews: + type: "object" + required: + - "access" + - "agent" + - "article" + - "country" + - "end" + - "project" + - "start" + - "sourceType" + properties: + access: + type: "string" + title: "Access" + description: + "If you want to filter by access method, use one of desktop,\ + \ mobile-app or mobile-web. If you are interested in pageviews regardless\ + \ of access method, use all-access." + examples: + - "all-access" + - "desktop" + - "mobile-app" + - "mobile-web" + order: 0 + agent: + type: "string" + title: "Agent" + description: + "If you want to filter by agent type, use one of user, automated\ + \ or spider. If you are interested in pageviews regardless of agent type,\ + \ use all-agents." + examples: + - "all-agents" + - "user" + - "spider" + - "automated" + order: 1 + article: + type: "string" + title: "Article" + description: + "The title of any article in the specified project. Any spaces\ + \ should be replaced with underscores. It also should be URI-encoded,\ + \ so that non-URI-safe characters like %, / or ? are accepted." + examples: + - "Are_You_the_One%3F" + order: 2 + country: + type: "string" + title: "Country" + description: + "The ISO 3166-1 alpha-2 code of a country for which to retrieve\ + \ top articles." + examples: + - "FR" + - "IN" + order: 3 + end: + type: "string" + title: "End" + description: + "The date of the last day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format." + order: 4 + project: + type: "string" + title: "Project" + description: + "If you want to filter by project, use the domain of any Wikimedia\ + \ project." + examples: + - "en.wikipedia.org" + - "www.mediawiki.org" + - "commons.wikimedia.org" + order: 5 + start: + type: "string" + title: "Start" + description: + "The date of the first day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format. 
Also serves as the date to retrieve data for the top articles." + order: 6 + sourceType: + title: "wikipedia-pageviews" + const: "wikipedia-pageviews" + enum: + - "wikipedia-pageviews" + order: 0 + type: "string" + source-wikipedia-pageviews-update: + type: "object" + required: + - "access" + - "agent" + - "article" + - "country" + - "end" + - "project" + - "start" + properties: + access: + type: "string" + title: "Access" + description: + "If you want to filter by access method, use one of desktop,\ + \ mobile-app or mobile-web. If you are interested in pageviews regardless\ + \ of access method, use all-access." + examples: + - "all-access" + - "desktop" + - "mobile-app" + - "mobile-web" + order: 0 + agent: + type: "string" + title: "Agent" + description: + "If you want to filter by agent type, use one of user, automated\ + \ or spider. If you are interested in pageviews regardless of agent type,\ + \ use all-agents." + examples: + - "all-agents" + - "user" + - "spider" + - "automated" + order: 1 + article: + type: "string" + title: "Article" + description: + "The title of any article in the specified project. Any spaces\ + \ should be replaced with underscores. It also should be URI-encoded,\ + \ so that non-URI-safe characters like %, / or ? are accepted." + examples: + - "Are_You_the_One%3F" + order: 2 + country: + type: "string" + title: "Country" + description: + "The ISO 3166-1 alpha-2 code of a country for which to retrieve\ + \ top articles." + examples: + - "FR" + - "IN" + order: 3 + end: + type: "string" + title: "End" + description: + "The date of the last day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format." + order: 4 + project: + type: "string" + title: "Project" + description: + "If you want to filter by project, use the domain of any Wikimedia\ + \ project." 
+ examples: + - "en.wikipedia.org" + - "www.mediawiki.org" + - "commons.wikimedia.org" + order: 5 + start: + type: "string" + title: "Start" + description: + "The date of the first day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format. Also serves as the date to retrieve data for the top articles." + order: 6 + source-google-directory: + title: "Google Directory Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Google Credentials" + description: + "Google APIs use the OAuth 2.0 protocol for authentication\ + \ and authorization. The Source supports Web server application and Service accounts scenarios." + type: "object" + oneOf: + - title: "Sign in via Google (OAuth)" + description: + "For these scenario user only needs to give permission to\ + \ read Google Directory data." + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Web server app" + order: 0 + enum: + - "Web server app" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of the developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client secret" + type: "string" + description: "The Client Secret of the developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The Token for obtaining a new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Service Account Key" + description: + "For these scenario user should obtain service account's\ + \ credentials from the Google API Console and provide delegated email." 
+ type: "object" + required: + - "credentials_json" + - "email" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Service accounts" + order: 0 + enum: + - "Service accounts" + credentials_json: + type: "string" + title: "Credentials JSON" + description: + "The contents of the JSON service account key. See the\ + \ docs for more information on how to generate this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + type: "string" + title: "Email" + description: + "The email of the user, which has permissions to access\ + \ the Google Workspace Admin APIs." + sourceType: + title: "google-directory" + const: "google-directory" + enum: + - "google-directory" + order: 0 + type: "string" + source-google-directory-update: + title: "Google Directory Spec" + type: "object" + required: [] + properties: + credentials: + title: "Google Credentials" + description: + "Google APIs use the OAuth 2.0 protocol for authentication\ + \ and authorization. The Source supports Web server application and Service accounts scenarios." + type: "object" + oneOf: + - title: "Sign in via Google (OAuth)" + description: + "For these scenario user only needs to give permission to\ + \ read Google Directory data." + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Web server app" + order: 0 + enum: + - "Web server app" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of the developer application." + airbyte_secret: true + client_secret: + title: "Client secret" + type: "string" + description: "The Client Secret of the developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The Token for obtaining a new access token." 
+ airbyte_secret: true + - title: "Service Account Key" + description: + "For these scenario user should obtain service account's\ + \ credentials from the Google API Console and provide delegated email." + type: "object" + required: + - "credentials_json" + - "email" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Service accounts" + order: 0 + enum: + - "Service accounts" + credentials_json: + type: "string" + title: "Credentials JSON" + description: + "The contents of the JSON service account key. See the\ + \ docs for more information on how to generate this key." + airbyte_secret: true + email: + type: "string" + title: "Email" + description: + "The email of the user, which has permissions to access\ + \ the Google Workspace Admin APIs." + source-smartengage: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "smartengage" + const: "smartengage" + enum: + - "smartengage" + order: 0 + type: "string" + source-smartengage-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + source-outbrain-amplify: + title: "Outbrain Amplify Spec" + type: "object" + required: + - "credentials" + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authentication Method" + description: + "Credentials for making authenticated requests requires either\ + \ username/password or access_token." + default: {} + order: 0 + type: "object" + oneOf: + - title: "Access token" + type: "object" + properties: + type: + title: "Access token is required for authentication requests." 
+ const: "access_token" + type: "string" + enum: + - "access_token" + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "type" + - "access_token" + - title: "Username Password" + type: "object" + properties: + type: + title: + "Both username and password is required for authentication\ + \ request." + const: "username_password" + type: "string" + enum: + - "username_password" + username: + type: "string" + description: "Add Username for authentication." + password: + type: "string" + description: "Add Password for authentication." + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "type" + - "username" + - "password" + report_granularity: + title: "Granularity for periodic reports." + description: + "The granularity used for periodic data in reports. See the docs." + enum: + - "daily" + - "weekly" + - "monthly" + order: 1 + type: "string" + geo_location_breakdown: + title: "Granularity for geo-location region." + description: "The granularity used for geo location data in reports." + enum: + - "country" + - "region" + - "subregion" + order: 2 + type: "string" + start_date: + type: "string" + order: 3 + description: + "Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + order: 4 + description: "Date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + sourceType: + title: "outbrain-amplify" + const: "outbrain-amplify" + enum: + - "outbrain-amplify" + order: 0 + type: "string" + source-outbrain-amplify-update: + title: "Outbrain Amplify Spec" + type: "object" + required: + - "credentials" + - "start_date" + properties: + credentials: + title: "Authentication Method" + description: + "Credentials for making authenticated requests requires either\ + \ username/password or access_token." 
+ default: {} + order: 0 + type: "object" + oneOf: + - title: "Access token" + type: "object" + properties: + type: + title: "Access token is required for authentication requests." + const: "access_token" + type: "string" + enum: + - "access_token" + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + required: + - "type" + - "access_token" + - title: "Username Password" + type: "object" + properties: + type: + title: + "Both username and password is required for authentication\ + \ request." + const: "username_password" + type: "string" + enum: + - "username_password" + username: + type: "string" + description: "Add Username for authentication." + password: + type: "string" + description: "Add Password for authentication." + airbyte_secret: true + required: + - "type" + - "username" + - "password" + report_granularity: + title: "Granularity for periodic reports." + description: + "The granularity used for periodic data in reports. See the docs." + enum: + - "daily" + - "weekly" + - "monthly" + order: 1 + type: "string" + geo_location_breakdown: + title: "Granularity for geo-location region." + description: "The granularity used for geo location data in reports." + enum: + - "country" + - "region" + - "subregion" + order: 2 + type: "string" + start_date: + type: "string" + order: 3 + description: + "Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + order: 4 + description: "Date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + source-k6-cloud: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + title: "Api Token" + description: + "Your API Token. See here. The key is case sensitive." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "k6-cloud" + const: "k6-cloud" + enum: + - "k6-cloud" + order: 0 + type: "string" + source-k6-cloud-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + title: "Api Token" + description: + "Your API Token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + source-postgres: + title: "Postgres Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "sourceType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + group: "db" + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + group: "db" + database: + title: "Database Name" + description: "Name of the database." + type: "string" + order: 2 + group: "db" + schemas: + title: "Schemas" + description: + "The list of schemas (case sensitive) to sync from. Defaults\ + \ to public." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "public" + order: 3 + group: "db" + username: + title: "Username" + description: "Username to access the database." + type: "string" + order: 4 + group: "auth" + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + group: "auth" + always_show: true + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." 
+ title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 6 + group: "advanced" + pattern_descriptor: "key1=value1&key2=value2" + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n Read more in the docs." + type: "object" + order: 8 + group: "security" + oneOf: + - title: "disable" + additionalProperties: true + description: + "Disables encryption of communication between Airbyte and\ + \ source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + order: 0 + enum: + - "disable" + - title: "allow" + additionalProperties: true + description: "Enables encryption only when required by the source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + order: 0 + enum: + - "allow" + - title: "prefer" + additionalProperties: true + description: + "Allows unencrypted connection only if the source database\ + \ does not support encryption." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + order: 0 + enum: + - "prefer" + - title: "require" + additionalProperties: true + description: + "Always require encryption. If the source database server\ + \ does not support encryption, connection will fail." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + order: 0 + enum: + - "require" + - title: "verify-ca" + additionalProperties: true + description: + "Always require encryption and verifies that the source database\ + \ server has a valid SSL certificate." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + order: 0 + enum: + - "verify-ca" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: true + description: + "This is the most secure mode. Always require encryption\ + \ and verifies the identity of the source database server." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + order: 0 + enum: + - "verify-full" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 9 + group: "advanced" + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Write-Ahead Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." + required: + - "method" + - "replication_slot" + - "publication" + additionalProperties: true + properties: + method: + type: "string" + const: "CDC" + order: 1 + enum: + - "CDC" + plugin: + type: "string" + title: "Plugin" + description: + "A logical decoding plugin installed on the PostgreSQL\ + \ server." + enum: + - "pgoutput" + default: "pgoutput" + order: 2 + replication_slot: + type: "string" + title: "Replication Slot" + description: + "A plugin logical replication slot. 
Read about replication slots." + order: 3 + publication: + type: "string" + title: "Publication" + description: + "A Postgres publication used for consuming changes. Read\ + \ about publications and replication identities." + order: 4 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 1200 seconds. Valid range: 120 seconds to 2400 seconds. Read about\ + \ initial waiting time." + default: 1200 + order: 5 + min: 120 + max: 2400 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 6 + min: 1000 + max: 10000 + lsn_commit_behaviour: + type: "string" + title: "LSN commit behaviour" + description: + "Determines when Airbyte should flush the LSN of processed\ + \ WAL logs in the source database. `After loading Data in the destination`\ + \ is default. If `While reading Data` is selected, in case of a\ + \ downstream failure (while loading data into the destination),\ + \ next sync would result in a full sync." + enum: + - "While reading Data" + - "After loading Data in the destination" + default: "After loading Data in the destination" + order: 7 + heartbeat_action_query: + type: "string" + title: "Debezium heartbeat query (Advanced)" + description: + "Specifies a query that the connector executes on the\ + \ source database when the connector sends a heartbeat message.\ + \ Please see the setup guide for how and when to configure this setting." 
+ default: "" + order: 8 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 9 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 10 + - title: "Detect Changes with Xmin System Column" + description: + "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. Suitable for databases that have low transaction\ + \ pressure." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + order: 8 + enum: + - "Standard" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + group: "security" + sourceType: + title: "postgres" + const: "postgres" + enum: + - "postgres" + order: 0 + type: "string" + groups: + - id: "db" + - id: "auth" + - id: "security" + title: "Security" + - id: "advanced" + title: "Advanced" + source-postgres-update: + title: "Postgres Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + group: "db" + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + group: "db" + database: + title: "Database Name" + description: "Name of the database." + type: "string" + order: 2 + group: "db" + schemas: + title: "Schemas" + description: + "The list of schemas (case sensitive) to sync from. Defaults\ + \ to public." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "public" + order: 3 + group: "db" + username: + title: "Username" + description: "Username to access the database." + type: "string" + order: 4 + group: "auth" + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 5 + group: "auth" + always_show: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 6 + group: "advanced" + pattern_descriptor: "key1=value1&key2=value2" + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n Read more in the docs." + type: "object" + order: 8 + group: "security" + oneOf: + - title: "disable" + additionalProperties: true + description: + "Disables encryption of communication between Airbyte and\ + \ source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + order: 0 + enum: + - "disable" + - title: "allow" + additionalProperties: true + description: "Enables encryption only when required by the source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + order: 0 + enum: + - "allow" + - title: "prefer" + additionalProperties: true + description: + "Allows unencrypted connection only if the source database\ + \ does not support encryption." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + order: 0 + enum: + - "prefer" + - title: "require" + additionalProperties: true + description: + "Always require encryption. If the source database server\ + \ does not support encryption, connection will fail." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + order: 0 + enum: + - "require" + - title: "verify-ca" + additionalProperties: true + description: + "Always require encryption and verifies that the source database\ + \ server has a valid SSL certificate." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + order: 0 + enum: + - "verify-ca" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: true + description: + "This is the most secure mode. Always require encryption\ + \ and verifies the identity of the source database server." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + order: 0 + enum: + - "verify-full" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." 
+ airbyte_secret: true + order: 4 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 9 + group: "advanced" + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Write-Ahead Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." + required: + - "method" + - "replication_slot" + - "publication" + additionalProperties: true + properties: + method: + type: "string" + const: "CDC" + order: 1 + enum: + - "CDC" + plugin: + type: "string" + title: "Plugin" + description: + "A logical decoding plugin installed on the PostgreSQL\ + \ server." + enum: + - "pgoutput" + default: "pgoutput" + order: 2 + replication_slot: + type: "string" + title: "Replication Slot" + description: + "A plugin logical replication slot. Read about replication slots." + order: 3 + publication: + type: "string" + title: "Publication" + description: + "A Postgres publication used for consuming changes. Read\ + \ about publications and replication identities." + order: 4 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 1200 seconds. Valid range: 120 seconds to 2400 seconds. Read about\ + \ initial waiting time." + default: 1200 + order: 5 + min: 120 + max: 2400 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." 
+ default: 10000 + order: 6 + min: 1000 + max: 10000 + lsn_commit_behaviour: + type: "string" + title: "LSN commit behaviour" + description: + "Determines when Airbyte should flush the LSN of processed\ + \ WAL logs in the source database. `After loading Data in the destination`\ + \ is default. If `While reading Data` is selected, in case of a\ + \ downstream failure (while loading data into the destination),\ + \ next sync would result in a full sync." + enum: + - "While reading Data" + - "After loading Data in the destination" + default: "After loading Data in the destination" + order: 7 + heartbeat_action_query: + type: "string" + title: "Debezium heartbeat query (Advanced)" + description: + "Specifies a query that the connector executes on the\ + \ source database when the connector sends a heartbeat message.\ + \ Please see the setup guide for how and when to configure this setting." + default: "" + order: 8 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 9 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 10 + - title: "Detect Changes with Xmin System Column" + description: + "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. 
Suitable for databases that have low transaction\ + \ pressure." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + order: 8 + enum: + - "Standard" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + group: "security" + groups: + - id: "db" + - id: "auth" + - id: "security" + title: "Security" + - id: "advanced" + title: "Advanced" + source-buildkite: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "buildkite" + const: "buildkite" + enum: + - "buildkite" + order: 0 + type: "string" 
+ source-buildkite-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-fauna: + title: "Fauna Spec" + type: "object" + required: + - "domain" + - "port" + - "scheme" + - "secret" + - "sourceType" + properties: + domain: + order: 0 + type: "string" + title: "Domain" + description: + "Domain of Fauna to query. Defaults db.fauna.com. See the\ + \ docs." + default: "db.fauna.com" + port: + order: 1 + type: "integer" + title: "Port" + description: "Endpoint port." + default: 443 + scheme: + order: 2 + type: "string" + title: "Scheme" + description: "URL scheme." + default: "https" + secret: + order: 3 + type: "string" + title: "Fauna Secret" + description: "Fauna secret, used when authenticating with the database." + airbyte_secret: true + x-speakeasy-param-sensitive: true + collection: + order: 5 + type: "object" + title: "Collection" + description: "Settings for the Fauna Collection." + required: + - "page_size" + - "deletions" + properties: + page_size: + order: 4 + type: "integer" + title: "Page Size" + default: 64 + description: + "The page size used when reading documents from the database.\ + \ The larger the page size, the faster the connector processes documents.\ + \ However, if a page is too large, the connector may fail.
    \n\ + Choose your page size based on how large the documents are.
    \n\ + See the docs." + deletions: + order: 5 + type: "object" + title: "Deletion Mode" + description: + "This only applies to incremental syncs.
    \n\ + Enabling deletion mode informs your destination of deleted documents.
    \n\ + Disabled - Leave this feature disabled, and ignore deleted documents.
    \n\ + Enabled - Enables this feature. When a document is deleted, the connector\ + \ exports a record with a \"deleted at\" column containing the time\ + \ that the document was deleted." + oneOf: + - title: "Disabled" + type: "object" + order: 0 + required: + - "deletion_mode" + properties: + deletion_mode: + type: "string" + const: "ignore" + enum: + - "ignore" + - title: "Enabled" + type: "object" + order: 1 + required: + - "deletion_mode" + - "column" + properties: + deletion_mode: + type: "string" + const: "deleted_field" + enum: + - "deleted_field" + column: + type: "string" + title: "Deleted At Column" + description: 'Name of the "deleted at" column.' + default: "deleted_at" + sourceType: + title: "fauna" + const: "fauna" + enum: + - "fauna" + order: 0 + type: "string" + source-fauna-update: + title: "Fauna Spec" + type: "object" + required: + - "domain" + - "port" + - "scheme" + - "secret" + properties: + domain: + order: 0 + type: "string" + title: "Domain" + description: + "Domain of Fauna to query. Defaults db.fauna.com. See the\ + \ docs." + default: "db.fauna.com" + port: + order: 1 + type: "integer" + title: "Port" + description: "Endpoint port." + default: 443 + scheme: + order: 2 + type: "string" + title: "Scheme" + description: "URL scheme." + default: "https" + secret: + order: 3 + type: "string" + title: "Fauna Secret" + description: "Fauna secret, used when authenticating with the database." + airbyte_secret: true + collection: + order: 5 + type: "object" + title: "Collection" + description: "Settings for the Fauna Collection." + required: + - "page_size" + - "deletions" + properties: + page_size: + order: 4 + type: "integer" + title: "Page Size" + default: 64 + description: + "The page size used when reading documents from the database.\ + \ The larger the page size, the faster the connector processes documents.\ + \ However, if a page is too large, the connector may fail.
    \n\ + Choose your page size based on how large the documents are.
    \n\ + See the docs." + deletions: + order: 5 + type: "object" + title: "Deletion Mode" + description: + "This only applies to incremental syncs.
    \n\ + Enabling deletion mode informs your destination of deleted documents.
    \n\ + Disabled - Leave this feature disabled, and ignore deleted documents.
    \n\ + Enabled - Enables this feature. When a document is deleted, the connector\ + \ exports a record with a \"deleted at\" column containing the time\ + \ that the document was deleted." + oneOf: + - title: "Disabled" + type: "object" + order: 0 + required: + - "deletion_mode" + properties: + deletion_mode: + type: "string" + const: "ignore" + enum: + - "ignore" + - title: "Enabled" + type: "object" + order: 1 + required: + - "deletion_mode" + - "column" + properties: + deletion_mode: + type: "string" + const: "deleted_field" + enum: + - "deleted_field" + column: + type: "string" + title: "Deleted At Column" + description: 'Name of the "deleted at" column.' + default: "deleted_at" + source-twilio: + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + - "sourceType" + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + x-speakeasy-param-sensitive: true + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + x-speakeasy-param-sensitive: true + start_date: + title: "Replication Start Date" + description: + "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + format: "date-time" + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. 
(in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + sourceType: + title: "twilio" + const: "twilio" + enum: + - "twilio" + order: 0 + type: "string" + source-twilio-update: + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + start_date: + title: "Replication Start Date" + description: + "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + format: "date-time" + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. (in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + source-sendgrid: + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 0 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + description: + "Sendgrid API Key, use admin to generate this key." 
+ x-speakeasy-param-sensitive: true + sourceType: + title: "sendgrid" + const: "sendgrid" + enum: + - "sendgrid" + order: 0 + type: "string" + source-sendgrid-update: + type: "object" + required: + - "start_date" + - "api_key" + properties: + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 0 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + description: + "Sendgrid API Key, use admin to generate this key." + source-gnews: + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + order: 1 + title: "Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. 
For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + language: + type: "string" + title: "Language" + decription: + "This parameter allows you to specify the language of the news\ + \ articles returned by the API. You have to set as value the 2 letters\ + \ code of the language you want to filter." + order: 2 + enum: + - "ar" + - "zh" + - "nl" + - "en" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "it" + - "ja" + - "ml" + - "mr" + - "no" + - "pt" + - "ro" + - "ru" + - "es" + - "sv" + - "ta" + - "te" + - "uk" + country: + type: "string" + title: "Country" + description: + "This parameter allows you to specify the country where the\ + \ news articles returned by the API were published, the contents of the\ + \ articles are not necessarily related to the specified country. You have\ + \ to set as value the 2 letters code of the country you want to filter." 
+ order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: + "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: + "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 7 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: + "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 8 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 9 + title: "Top Headlines Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." + order: 10 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + sourceType: + title: "gnews" + const: "gnews" + enum: + - "gnews" + order: 0 + type: "string" + source-gnews-update: + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + query: + type: "string" + order: 1 + title: "Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. 
For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + language: + type: "string" + title: "Language" + decription: + "This parameter allows you to specify the language of the news\ + \ articles returned by the API. You have to set as value the 2 letters\ + \ code of the language you want to filter." + order: 2 + enum: + - "ar" + - "zh" + - "nl" + - "en" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "it" + - "ja" + - "ml" + - "mr" + - "no" + - "pt" + - "ro" + - "ru" + - "es" + - "sv" + - "ta" + - "te" + - "uk" + country: + type: "string" + title: "Country" + description: + "This parameter allows you to specify the country where the\ + \ news articles returned by the API were published, the contents of the\ + \ articles are not necessarily related to the specified country. You have\ + \ to set as value the 2 letters code of the country you want to filter." 
+ order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: + "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: + "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 7 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: + "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 8 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 9 + title: "Top Headlines Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." + order: 10 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + source-google-ads: + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + type: "object" + description: "" + title: "Google Credentials" + order: 0 + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + order: 3 + description: + "The token used to obtain a new Access Token. 
For detailed\ + \ instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + order: 4 + description: + "The Access Token for making authenticated requests. For\ + \ detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + customer_id: + title: "Customer ID(s)" + type: "string" + description: + "Comma-separated list of (client) customer IDs. Each customer\ + \ ID must be specified as a 10-digit number without dashes. For detailed\ + \ instructions on finding this value, refer to our documentation." + pattern: "^[0-9]{10}(,[0-9]{10})*$" + pattern_descriptor: + "The customer ID must be 10 digits. Separate multiple\ + \ customer IDs using commas." + examples: + - "6783948572,5839201945" + order: 1 + customer_status_filter: + title: "Customer Statuses Filter" + description: + "A list of customer statuses to filter on. For detailed info\ + \ about what each status mean refer to Google Ads documentation." + default: [] + order: 2 + type: "array" + items: + title: "CustomerStatus" + description: "An enumeration." + enum: + - "UNKNOWN" + - "ENABLED" + - "CANCELED" + - "SUSPENDED" + - "CLOSED" + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. (Default value of two years ago is used if not\ + \ set)" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-25" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
(Default value of today is used if not set)" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-30" + order: 4 + format: "date" + custom_queries_array: + type: "array" + title: "Custom GAQL Queries" + description: "" + order: 5 + items: + type: "object" + required: + - "query" + - "table_name" + properties: + query: + type: "string" + multiline: true + title: "Custom Query" + description: + "A custom defined GAQL query for building the report.\ + \ Avoid including the segments.date field; wherever possible, Airbyte\ + \ will automatically include it for incremental syncs. For more\ + \ information, refer to Google's documentation." + examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination Table Name" + description: + "The table name in your destination database for the\ + \ chosen query." + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: + "A conversion window is the number of days after an ad interaction\ + \ (such as an ad click or video view) during which a conversion, such\ + \ as a purchase, is recorded in Google Ads. For more information, see\ + \ Google's documentation." 
+ minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + order: 6 + sourceType: + title: "google-ads" + const: "google-ads" + enum: + - "google-ads" + order: 0 + type: "string" + source-google-ads-update: + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + type: "object" + description: "" + title: "Google Credentials" + order: 0 + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + order: 3 + description: + "The token used to obtain a new Access Token. For detailed\ + \ instructions on finding this value, refer to our documentation." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + order: 4 + description: + "The Access Token for making authenticated requests. For\ + \ detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + customer_id: + title: "Customer ID(s)" + type: "string" + description: + "Comma-separated list of (client) customer IDs. Each customer\ + \ ID must be specified as a 10-digit number without dashes. For detailed\ + \ instructions on finding this value, refer to our documentation." 
+ pattern: "^[0-9]{10}(,[0-9]{10})*$" + pattern_descriptor: + "The customer ID must be 10 digits. Separate multiple\ + \ customer IDs using commas." + examples: + - "6783948572,5839201945" + order: 1 + customer_status_filter: + title: "Customer Statuses Filter" + description: + "A list of customer statuses to filter on. For detailed info\ + \ about what each status mean refer to Google Ads documentation." + default: [] + order: 2 + type: "array" + items: + title: "CustomerStatus" + description: "An enumeration." + enum: + - "UNKNOWN" + - "ENABLED" + - "CANCELED" + - "SUSPENDED" + - "CLOSED" + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. (Default value of two years ago is used if not\ + \ set)" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-25" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. (Default value of today is used if not set)" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-30" + order: 4 + format: "date" + custom_queries_array: + type: "array" + title: "Custom GAQL Queries" + description: "" + order: 5 + items: + type: "object" + required: + - "query" + - "table_name" + properties: + query: + type: "string" + multiline: true + title: "Custom Query" + description: + "A custom defined GAQL query for building the report.\ + \ Avoid including the segments.date field; wherever possible, Airbyte\ + \ will automatically include it for incremental syncs. For more\ + \ information, refer to Google's documentation." 
+ examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination Table Name" + description: + "The table name in your destination database for the\ + \ chosen query." + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: + "A conversion window is the number of days after an ad interaction\ + \ (such as an ad click or video view) during which a conversion, such\ + \ as a purchase, is recorded in Google Ads. For more information, see\ + \ Google's documentation." + minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + order: 6 + source-google-search-console: + title: "Google Search Console Spec" + type: "object" + required: + - "site_urls" + - "authorization" + - "sourceType" + properties: + site_urls: + type: "array" + items: + type: "string" + title: "Website URL Property" + description: + "The URLs of the website property attached to your GSC account.\ + \ Learn more about properties here." + examples: + - "https://example1.com/" + - "sc-domain:example2.com" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true + order: 1 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." 
+ examples: + - "2021-12-12" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + order: 2 + format: "date" + authorization: + type: "object" + title: "Authentication Type" + description: "" + order: 3 + oneOf: + - title: "OAuth" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "Access token for making authenticated requests. Read\ + \ more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "The token for obtaining a new access token. Read more\ + \ here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + title: "Admin Email" + type: "string" + description: + "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: + "A list of available dimensions. Please note, that for\ + \ technical reasons `date` is the default dimension which will be\ + \ included in your query whether you specify it or not. Primary\ + \ key will consist of your custom dimensions and the default dimension\ + \ along with `site_url` and `search_type`." + type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + default: + - "date" + minItems: 0 + required: + - "name" + - "dimensions" + data_state: + type: "string" + title: "Data Freshness" + enum: + - "final" + - "all" + description: + "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." 
+ examples: + - "final" + - "all" + default: "final" + order: 6 + sourceType: + title: "google-search-console" + const: "google-search-console" + enum: + - "google-search-console" + order: 0 + type: "string" + source-google-search-console-update: + title: "Google Search Console Spec" + type: "object" + required: + - "site_urls" + - "authorization" + properties: + site_urls: + type: "array" + items: + type: "string" + title: "Website URL Property" + description: + "The URLs of the website property attached to your GSC account.\ + \ Learn more about properties here." + examples: + - "https://example1.com/" + - "sc-domain:example2.com" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true + order: 1 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." + examples: + - "2021-12-12" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + order: 2 + format: "date" + authorization: + type: "object" + title: "Authentication Type" + description: "" + order: 3 + oneOf: + - title: "OAuth" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." 
+ airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "Access token for making authenticated requests. Read\ + \ more here." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "The token for obtaining a new access token. Read more\ + \ here." + airbyte_secret: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + airbyte_secret: true + email: + title: "Admin Email" + type: "string" + description: + "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: + "A list of available dimensions. Please note, that for\ + \ technical reasons `date` is the default dimension which will be\ + \ included in your query whether you specify it or not. Primary\ + \ key will consist of your custom dimensions and the default dimension\ + \ along with `site_url` and `search_type`." 
+ type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + default: + - "date" + minItems: 0 + required: + - "name" + - "dimensions" + data_state: + type: "string" + title: "Data Freshness" + enum: + - "final" + - "all" + description: + "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." + examples: + - "final" + - "all" + default: "final" + order: 6 + source-kyve: + title: "KYVE Spec" + type: "object" + required: + - "pool_ids" + - "start_ids" + - "url_base" + - "sourceType" + properties: + pool_ids: + type: "string" + title: "Pool-IDs" + description: + "The IDs of the KYVE storage pool you want to archive. (Comma\ + \ separated)" + order: 0 + examples: + - "0" + - "0,1" + start_ids: + type: "string" + title: "Bundle-Start-IDs" + description: + "The start-id defines, from which bundle id the pipeline should\ + \ start to extract the data. (Comma separated)" + order: 1 + examples: + - "0" + - "0,0" + url_base: + type: "string" + title: "KYVE-API URL Base" + description: "URL to the KYVE Chain API." + default: "https://api.kyve.network" + order: 2 + examples: + - "https://api.kaon.kyve.network/" + - "https://api.korellia.kyve.network/" + sourceType: + title: "kyve" + const: "kyve" + enum: + - "kyve" + order: 0 + type: "string" + source-kyve-update: + title: "KYVE Spec" + type: "object" + required: + - "pool_ids" + - "start_ids" + - "url_base" + properties: + pool_ids: + type: "string" + title: "Pool-IDs" + description: + "The IDs of the KYVE storage pool you want to archive. 
(Comma\ + \ separated)" + order: 0 + examples: + - "0" + - "0,1" + start_ids: + type: "string" + title: "Bundle-Start-IDs" + description: + "The start-id defines, from which bundle id the pipeline should\ + \ start to extract the data. (Comma separated)" + order: 1 + examples: + - "0" + - "0,0" + url_base: + type: "string" + title: "KYVE-API URL Base" + description: "URL to the KYVE Chain API." + default: "https://api.kyve.network" + order: 2 + examples: + - "https://api.kaon.kyve.network/" + - "https://api.korellia.kyve.network/" + source-strava: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "athlete_id" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + description: "The Client ID of your Strava developer application." + title: "Client ID" + pattern: "^[0-9_\\-]+$" + examples: + - "12345" + order: 0 + client_secret: + type: "string" + description: "The Client Secret of your Strava developer application." + title: "Client Secret" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + description: "The Refresh Token with the activity: read_all permissions." + title: "Refresh Token" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + athlete_id: + type: "integer" + description: "The Athlete ID of your Strava developer application." + title: "Athlete ID" + pattern: "^[0-9_\\-]+$" + examples: + - "17831421" + order: 3 + start_date: + type: "string" + description: "UTC date and time. Any data before this date will not be replicated." 
+ title: "Start Date" + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 4 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 5 + sourceType: + title: "strava" + const: "strava" + enum: + - "strava" + order: 0 + type: "string" + source-strava-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "athlete_id" + - "start_date" + properties: + client_id: + type: "string" + description: "The Client ID of your Strava developer application." + title: "Client ID" + pattern: "^[0-9_\\-]+$" + examples: + - "12345" + order: 0 + client_secret: + type: "string" + description: "The Client Secret of your Strava developer application." + title: "Client Secret" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 1 + refresh_token: + type: "string" + description: "The Refresh Token with the activity: read_all permissions." + title: "Refresh Token" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 2 + athlete_id: + type: "integer" + description: "The Athlete ID of your Strava developer application." + title: "Athlete ID" + pattern: "^[0-9_\\-]+$" + examples: + - "17831421" + order: 3 + start_date: + type: "string" + description: "UTC date and time. Any data before this date will not be replicated." + title: "Start Date" + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 4 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 5 + source-smaily: + type: "object" + required: + - "api_password" + - "api_subdomain" + - "api_username" + - "sourceType" + properties: + api_password: + type: "string" + title: "API User Password" + description: "API user password. 
See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + order: 1 + api_username: + type: "string" + title: "API User Username" + description: "API user username. See https://smaily.com/help/api/general/create-api-user/" + order: 2 + sourceType: + title: "smaily" + const: "smaily" + enum: + - "smaily" + order: 0 + type: "string" + source-smaily-update: + type: "object" + required: + - "api_password" + - "api_subdomain" + - "api_username" + properties: + api_password: + type: "string" + title: "API User Password" + description: "API user password. See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + order: 0 + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + order: 1 + api_username: + type: "string" + title: "API User Username" + description: "API user username. 
See https://smaily.com/help/api/general/create-api-user/" + order: 2 + source-height: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + search_query: + type: "string" + description: "Search query to be used with search stream" + title: "search_query" + default: "task" + order: 2 + sourceType: + title: "height" + const: "height" + enum: + - "height" + order: 0 + type: "string" + source-height-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + search_query: + type: "string" + description: "Search query to be used with search stream" + title: "search_query" + default: "task" + order: 2 + source-piwik: + type: "object" + required: + - "client_id" + - "client_secret" + - "organization_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: "The organization id appearing at URL of your piwik website" + order: 2 + title: "Organization ID" + sourceType: + title: "piwik" + const: "piwik" + enum: + - "piwik" + order: 0 + type: "string" + source-piwik-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "organization_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + 
airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + organization_id: + type: "string" + description: "The organization id appearing at URL of your piwik website" + order: 2 + title: "Organization ID" + source-polygon-stock-api: + type: "object" + required: + - "apiKey" + - "end_date" + - "multiplier" + - "start_date" + - "stocksTicker" + - "timespan" + - "sourceType" + properties: + sort: + type: "string" + order: 5 + title: "Sort" + examples: + - "asc" + - "desc" + description: + "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + limit: + type: "integer" + order: 3 + title: "Limit" + examples: + - 100 + - 120 + description: "The target date for the aggregate window." + apiKey: + type: "string" + order: 1 + title: "API Key" + description: "Your API ACCESS Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + adjusted: + type: "string" + order: 0 + title: "Adjusted" + examples: + - "true" + - "false" + description: + "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + end_date: + type: "string" + order: 2 + title: "End Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The target date for the aggregate window." + timespan: + type: "string" + order: 8 + title: "Timespan" + examples: + - "day" + description: "The size of the time window." + multiplier: + type: "integer" + order: 4 + title: "Multiplier" + examples: + - 1 + - 2 + description: "The size of the timespan multiplier." 
+ start_date: + type: "string" + order: 6 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The beginning date for the aggregate window." + stocksTicker: + type: "string" + order: 7 + title: "Stock Ticker" + examples: + - "IBM" + - "MSFT" + description: "The exchange symbol that this item is traded under." + sourceType: + title: "polygon-stock-api" + const: "polygon-stock-api" + enum: + - "polygon-stock-api" + order: 0 + type: "string" + source-polygon-stock-api-update: + type: "object" + required: + - "apiKey" + - "end_date" + - "multiplier" + - "start_date" + - "stocksTicker" + - "timespan" + properties: + sort: + type: "string" + order: 5 + title: "Sort" + examples: + - "asc" + - "desc" + description: + "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + limit: + type: "integer" + order: 3 + title: "Limit" + examples: + - 100 + - 120 + description: "The target date for the aggregate window." + apiKey: + type: "string" + order: 1 + title: "API Key" + description: "Your API ACCESS Key" + airbyte_secret: true + adjusted: + type: "string" + order: 0 + title: "Adjusted" + examples: + - "true" + - "false" + description: + "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + end_date: + type: "string" + order: 2 + title: "End Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The target date for the aggregate window." + timespan: + type: "string" + order: 8 + title: "Timespan" + examples: + - "day" + description: "The size of the time window." 
+ multiplier: + type: "integer" + order: 4 + title: "Multiplier" + examples: + - 1 + - 2 + description: "The size of the timespan multiplier." + start_date: + type: "string" + order: 6 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The beginning date for the aggregate window." + stocksTicker: + type: "string" + order: 7 + title: "Stock Ticker" + examples: + - "IBM" + - "MSFT" + description: "The exchange symbol that this item is traded under." + source-shopify: + title: "Shopify Source CDK Specifications" + type: "object" + required: + - "shop" + - "sourceType" + properties: + shop: + type: "string" + title: "Shopify Store" + description: + "The name of your Shopify store found in the URL. For example,\ + \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ + \ or 'NAME.myshopify.com'." + pattern: "^(?!https://)(?!https://).*" + examples: + - "my-store" + - "my-store.myshopify.com" + order: 1 + credentials: + title: "Shopify Authorization Method" + description: "The authorization method to use to retrieve data from Shopify" + type: "object" + order: 2 + oneOf: + - type: "object" + title: "OAuth2.0" + description: "OAuth2.0" + required: + - "auth_method" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "The Access Token for making authenticated requests." 
+ airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: + "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Replication Start Date" + description: + "The date you would like to replicate data from. Format: YYYY-MM-DD.\ + \ Any data before this date will not be replicated." + default: "2020-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 3 + bulk_window_in_days: + type: "integer" + title: "GraphQL BULK Date Range in Days" + description: "Defines what would be a date range per single BULK Job" + default: 30 + fetch_transactions_user_id: + type: "boolean" + title: "Add `user_id` to Transactions (slower)" + description: + "Defines which API type (REST/BULK) to use to fetch `Transactions`\ + \ data. If you are a `Shopify Plus` user, leave the default value to speed\ + \ up the fetch." + default: false + job_product_variants_include_pres_prices: + type: "boolean" + title: "Add `Presentment prices` to Product Variants" + description: + "If enabled, the `Product Variants` stream attempts to include\ + \ `Presentment prices` field (may affect the performance)." + default: true + job_termination_threshold: + type: "integer" + title: "BULK Job termination threshold" + description: + "The max time in seconds, after which the single BULK Job should\ + \ be `CANCELED` and retried. The bigger the value the longer the BULK\ + \ Job is allowed to run." 
+ default: 7200 + minimum: 3600 + maximum: 21600 + job_checkpoint_interval: + type: "integer" + title: "BULK Job checkpoint (rows collected)" + description: "The threshold, after which the single BULK Job should be checkpointed." + default: 100000 + minimum: 15000 + maximum: 200000 + sourceType: + title: "shopify" + const: "shopify" + enum: + - "shopify" + order: 0 + type: "string" + source-shopify-update: + title: "Shopify Source CDK Specifications" + type: "object" + required: + - "shop" + properties: + shop: + type: "string" + title: "Shopify Store" + description: + "The name of your Shopify store found in the URL. For example,\ + \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ + \ or 'NAME.myshopify.com'." + pattern: "^(?!https://)(?!https://).*" + examples: + - "my-store" + - "my-store.myshopify.com" + order: 1 + credentials: + title: "Shopify Authorization Method" + description: "The authorization method to use to retrieve data from Shopify" + type: "object" + order: 2 + oneOf: + - type: "object" + title: "OAuth2.0" + description: "OAuth2.0" + required: + - "auth_method" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "The Access Token for making authenticated requests." 
+ airbyte_secret: true + order: 3 + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: + "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Replication Start Date" + description: + "The date you would like to replicate data from. Format: YYYY-MM-DD.\ + \ Any data before this date will not be replicated." + default: "2020-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 3 + bulk_window_in_days: + type: "integer" + title: "GraphQL BULK Date Range in Days" + description: "Defines what would be a date range per single BULK Job" + default: 30 + fetch_transactions_user_id: + type: "boolean" + title: "Add `user_id` to Transactions (slower)" + description: + "Defines which API type (REST/BULK) to use to fetch `Transactions`\ + \ data. If you are a `Shopify Plus` user, leave the default value to speed\ + \ up the fetch." + default: false + job_product_variants_include_pres_prices: + type: "boolean" + title: "Add `Presentment prices` to Product Variants" + description: + "If enabled, the `Product Variants` stream attempts to include\ + \ `Presentment prices` field (may affect the performance)." + default: true + job_termination_threshold: + type: "integer" + title: "BULK Job termination threshold" + description: + "The max time in seconds, after which the single BULK Job should\ + \ be `CANCELED` and retried. The bigger the value the longer the BULK\ + \ Job is allowed to run." 
+ default: 7200 + minimum: 3600 + maximum: 21600 + job_checkpoint_interval: + type: "integer" + title: "BULK Job checkpoint (rows collected)" + description: "The threshold, after which the single BULK Job should be checkpointed." + default: 100000 + minimum: 15000 + maximum: 200000 + source-omnisend: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "omnisend" + const: "omnisend" + enum: + - "omnisend" + order: 0 + type: "string" + source-omnisend-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + source-mongodb-v2: + title: "MongoDb Source Spec" + type: "object" + required: + - "database_config" + - "sourceType" + properties: + database_config: + type: "object" + title: "Cluster Type" + description: "Configures the MongoDB cluster type." + order: 1 + group: "connection" + display_type: "radio" + oneOf: + - title: "MongoDB Atlas Replica Set" + description: "MongoDB Atlas-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + - "username" + - "password" + - "auth_source" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "ATLAS_REPLICA_SET" + order: 1 + enum: + - "ATLAS_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate." + examples: + - "mongodb+srv://cluster0.abcd1.mongodb.net/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." 
+ order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\ + \ for more details." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + - title: "Self-Managed Replica Set" + description: "MongoDB self-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "SELF_MANAGED_REPLICA_SET" + order: 1 + enum: + - "SELF_MANAGED_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\ + \ for more information." + examples: + - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/" + - "mongodb://example.host.com:27017/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." 
+ order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to 300 seconds.\ + \ Valid range: 120 seconds to 1200 seconds." + default: 300 + order: 8 + min: 120 + max: 1200 + group: "advanced" + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with memory\ + \ consumption and efficiency of the connector, please be careful." + default: 10000 + order: 9 + min: 1000 + max: 10000 + group: "advanced" + discover_sample_size: + type: "integer" + title: "Document discovery sample size (Advanced)" + description: + "The maximum number of documents to sample when attempting\ + \ to discover the unique fields for a collection." + default: 10000 + order: 10 + minimum: 10 + maximum: 100000 + group: "advanced" + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data in\ + \ case of an stale/invalid cursor value into the WAL. If 'Fail sync' is\ + \ chosen, a user will have to manually reset the connection before being\ + \ able to continue syncing data. 
If 'Re-sync data' is chosen, Airbyte\ + \ will automatically trigger a refresh but could lead to higher cloud\ + \ costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 11 + group: "advanced" + update_capture_mode: + type: "string" + title: "Capture mode (Advanced)" + description: + "Determines how Airbyte looks up the value of an updated document.\ + \ If 'Lookup' is chosen, the current value of the document will be read.\ + \ If 'Post Image' is chosen, then the version of the document immediately\ + \ after an update will be read. WARNING : Severe data loss will occur\ + \ if this option is chosen and the appropriate settings are not set on\ + \ your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images." + enum: + - "Lookup" + - "Post Image" + default: "Lookup" + order: 12 + group: "advanced" + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 13 + group: "advanced" + sourceType: + title: "mongodb-v2" + const: "mongodb-v2" + enum: + - "mongodb-v2" + order: 0 + type: "string" + groups: + - id: "connection" + - id: "advanced" + title: "Advanced" + source-mongodb-v2-update: + title: "MongoDb Source Spec" + type: "object" + required: + - "database_config" + properties: + database_config: + type: "object" + title: "Cluster Type" + description: "Configures the MongoDB cluster type." 
+ order: 1 + group: "connection" + display_type: "radio" + oneOf: + - title: "MongoDB Atlas Replica Set" + description: "MongoDB Atlas-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + - "username" + - "password" + - "auth_source" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "ATLAS_REPLICA_SET" + order: 1 + enum: + - "ATLAS_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate." + examples: + - "mongodb+srv://cluster0.abcd1.mongodb.net/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\ + \ for more details." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." 
+ default: true + type: "boolean" + always_show: true + order: 7 + - title: "Self-Managed Replica Set" + description: "MongoDB self-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "SELF_MANAGED_REPLICA_SET" + order: 1 + enum: + - "SELF_MANAGED_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\ + \ for more information." + examples: + - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/" + - "mongodb://example.host.com:27017/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to 300 seconds.\ + \ Valid range: 120 seconds to 1200 seconds." + default: 300 + order: 8 + min: 120 + max: 1200 + group: "advanced" + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with memory\ + \ consumption and efficiency of the connector, please be careful." + default: 10000 + order: 9 + min: 1000 + max: 10000 + group: "advanced" + discover_sample_size: + type: "integer" + title: "Document discovery sample size (Advanced)" + description: + "The maximum number of documents to sample when attempting\ + \ to discover the unique fields for a collection." + default: 10000 + order: 10 + minimum: 10 + maximum: 100000 + group: "advanced" + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data in\ + \ case of an stale/invalid cursor value into the WAL. If 'Fail sync' is\ + \ chosen, a user will have to manually reset the connection before being\ + \ able to continue syncing data. If 'Re-sync data' is chosen, Airbyte\ + \ will automatically trigger a refresh but could lead to higher cloud\ + \ costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 11 + group: "advanced" + update_capture_mode: + type: "string" + title: "Capture mode (Advanced)" + description: + "Determines how Airbyte looks up the value of an updated document.\ + \ If 'Lookup' is chosen, the current value of the document will be read.\ + \ If 'Post Image' is chosen, then the version of the document immediately\ + \ after an update will be read. WARNING : Severe data loss will occur\ + \ if this option is chosen and the appropriate settings are not set on\ + \ your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images." 
+ enum: + - "Lookup" + - "Post Image" + default: "Lookup" + order: 12 + group: "advanced" + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 13 + group: "advanced" + groups: + - id: "connection" + - id: "advanced" + title: "Advanced" + source-retently: + title: "Retently Api Spec" + type: "object" + properties: + credentials: + title: "Authentication Mechanism" + description: "Choose how to authenticate to Retently" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Retently (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Retently Refresh Token which can be used to fetch new\ + \ Bearer Tokens when the current one expires." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with API Token" + required: + - "api_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_key: + title: "API Token" + description: + "Retently API Token. See the docs for more information on how to obtain this key." 
+ type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "retently" + const: "retently" + enum: + - "retently" + order: 0 + type: "string" + source-retently-update: + title: "Retently Api Spec" + type: "object" + properties: + credentials: + title: "Authentication Mechanism" + description: "Choose how to authenticate to Retently" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Retently (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Retently Refresh Token which can be used to fetch new\ + \ Bearer Tokens when the current one expires." + airbyte_secret: true + - type: "object" + title: "Authenticate with API Token" + required: + - "api_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_key: + title: "API Token" + description: + "Retently API Token. See the docs for more information on how to obtain this key." 
+ type: "string" + airbyte_secret: true + source-coda: + type: "object" + required: + - "auth_token" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication token" + description: "Bearer token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "coda" + const: "coda" + enum: + - "coda" + order: 0 + type: "string" + source-coda-update: + type: "object" + required: + - "auth_token" + properties: + auth_token: + type: "string" + title: "Authentication token" + description: "Bearer token" + airbyte_secret: true + order: 0 + source-fleetio: + type: "object" + required: + - "api_key" + - "account_token" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_token: + type: "string" + order: 1 + title: "account_token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "fleetio" + const: "fleetio" + enum: + - "fleetio" + order: 0 + type: "string" + source-fleetio-update: + type: "object" + required: + - "api_key" + - "account_token" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + airbyte_secret: true + account_token: + type: "string" + order: 1 + title: "account_token" + airbyte_secret: true + source-pendo: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "pendo" + const: "pendo" + enum: + - "pendo" + order: 0 + type: "string" + source-pendo-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + source-jotform: + type: "object" + required: + - "api_key" + - "api_endpoint" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + 
airbyte_secret: true + x-speakeasy-param-sensitive: true + end_date: + type: "string" + order: 3 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_endpoint: + type: "object" + oneOf: + - type: "object" + title: "Basic" + required: + - "url_prefix" + properties: + url_prefix: + type: "string" + description: + "You can access our API through the following URLs -\ + \ Standard API Usage (Use the default API URL - https://api.jotform.com),\ + \ For EU (Use the EU API URL - https://eu-api.jotform.com), For\ + \ HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)" + enum: + - "Standard" + - "EU" + - "HIPAA" + title: "Base URL Prefix" + default: "Standard" + api_endpoint: + type: "string" + const: "basic" + order: 0 + enum: + - "basic" + - type: "object" + title: "Enterprise" + required: + - "enterprise_url" + properties: + api_endpoint: + type: "string" + const: "enterprise" + order: 0 + enum: + - "enterprise" + enterprise_url: + type: "string" + description: + "Upgrade to Enterprise to make your API url your-domain.com/API\ + \ or subdomain.jotform.com/API instead of api.jotform.com" + title: "Enterprise URL" + order: 1 + title: "API Endpoint" + sourceType: + title: "jotform" + const: "jotform" + enum: + - "jotform" + order: 0 + type: "string" + source-jotform-update: + type: "object" + required: + - "api_key" + - "api_endpoint" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + end_date: + type: "string" + order: 3 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: 
"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_endpoint: + type: "object" + oneOf: + - type: "object" + title: "Basic" + required: + - "url_prefix" + properties: + url_prefix: + type: "string" + description: + "You can access our API through the following URLs -\ + \ Standard API Usage (Use the default API URL - https://api.jotform.com),\ + \ For EU (Use the EU API URL - https://eu-api.jotform.com), For\ + \ HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)" + enum: + - "Standard" + - "EU" + - "HIPAA" + title: "Base URL Prefix" + default: "Standard" + api_endpoint: + type: "string" + const: "basic" + order: 0 + enum: + - "basic" + - type: "object" + title: "Enterprise" + required: + - "enterprise_url" + properties: + api_endpoint: + type: "string" + const: "enterprise" + order: 0 + enum: + - "enterprise" + enterprise_url: + type: "string" + description: + "Upgrade to Enterprise to make your API url your-domain.com/API\ + \ or subdomain.jotform.com/API instead of api.jotform.com" + title: "Enterprise URL" + order: 1 + title: "API Endpoint" + source-instagram: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated. If left blank, the start date will be\ + \ set to 2 years before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: + "The value of the access token generated with instagram_basic,\ + \ instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram\ + \ Public Content Access permissions. 
See the docs for more information" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + sourceType: + title: "instagram" + const: "instagram" + enum: + - "instagram" + order: 0 + type: "string" + required: + - "access_token" + - "sourceType" + source-instagram-update: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated. If left blank, the start date will be\ + \ set to 2 years before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: + "The value of the access token generated with instagram_basic,\ + \ instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram\ + \ Public Content Access permissions. 
See the docs for more information" + airbyte_secret: true + type: "string" + required: + - "access_token" + source-dbt: + type: "object" + required: + - "api_key_2" + - "account_id" + - "sourceType" + properties: + api_key_2: + type: "string" + order: 0 + title: "Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + order: 1 + title: "account_id" + sourceType: + title: "dbt" + const: "dbt" + enum: + - "dbt" + order: 0 + type: "string" + source-dbt-update: + type: "object" + required: + - "api_key_2" + - "account_id" + properties: + api_key_2: + type: "string" + order: 0 + title: "Token" + airbyte_secret: true + account_id: + type: "string" + order: 1 + title: "account_id" + source-nylas: + type: "object" + required: + - "api_key" + - "api_server" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + api_server: + type: "string" + enum: + - "us" + - "eu" + order: 1 + title: "API Server" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "nylas" + const: "nylas" + enum: + - "nylas" + order: 0 + type: "string" + source-nylas-update: + type: "object" + required: + - "api_key" + - "api_server" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + api_server: + type: "string" + enum: + - "us" + - "eu" + order: 1 + title: "API Server" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + end_date: + type: "string" + title: "End date" + format: 
"date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + source-s3: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." 
+ default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." 
+ default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + role_arn: + title: "AWS Role ARN" + description: + "Specifies the Amazon Resource Name (ARN) of an IAM role that\ + \ you want to use to perform operations requested using this profile.\ + \ Set the External ID to the Airbyte workspace ID, which can be found\ + \ in the URL of this page." + order: 6 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + examples: + - "my-s3-endpoint.com" + - "https://my-s3-endpoint.com" + order: 4 + type: "string" + region_name: + title: "AWS Region" + description: + "AWS region where the S3 bucket is located. If not provided,\ + \ the region will be determined automatically." + order: 5 + type: "string" + sourceType: + title: "s3" + const: "s3" + enum: + - "s3" + order: 0 + type: "string" + required: + - "streams" + - "bucket" + - "sourceType" + source-s3-update: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + order: 2 + type: "string" + role_arn: + title: "AWS Role ARN" + description: + "Specifies the Amazon Resource Name (ARN) of an IAM role that\ + \ you want to use to perform operations requested using this profile.\ + \ Set the External ID to the Airbyte workspace ID, which can be found\ + \ in the URL of this page." + order: 6 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + examples: + - "my-s3-endpoint.com" + - "https://my-s3-endpoint.com" + order: 4 + type: "string" + region_name: + title: "AWS Region" + description: + "AWS region where the S3 bucket is located. If not provided,\ + \ the region will be determined automatically." + order: 5 + type: "string" + required: + - "streams" + - "bucket" + source-azure-blob-storage: + title: "SourceAzureBlobStorageSpec" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\ + \ File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." 
+ default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Azure Blob Storage" + type: "object" + order: 2 + oneOf: + - title: "Authenticate via Oauth2" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2" + const: "oauth2" + enum: + - "oauth2" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft Azure Application user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Authenticate via Storage Account Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "storage_account_key" + const: "storage_account_key" + enum: + - "storage_account_key" + type: "string" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "azure_blob_storage_account_key" + - "auth_type" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + description: "The account's name of the Azure Blob Storage." 
+ examples: + - "airbyte5storage" + order: 3 + type: "string" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + description: "The name of the Azure blob storage container." + examples: + - "airbytetescontainername" + order: 4 + type: "string" + azure_blob_storage_endpoint: + title: "Endpoint Domain Name" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + order: 11 + type: "string" + sourceType: + title: "azure-blob-storage" + const: "azure-blob-storage" + enum: + - "azure-blob-storage" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "azure_blob_storage_account_name" + - "azure_blob_storage_container_name" + - "sourceType" + source-azure-blob-storage-update: + title: "SourceAzureBlobStorageSpec" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\ + \ File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. 
When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. 
This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." 
+ default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + 
true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." 
+ default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. `hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." 
+ default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Azure Blob Storage" + type: "object" + order: 2 + oneOf: + - title: "Authenticate via Oauth2" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2" + const: "oauth2" + enum: + - "oauth2" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft Azure Application user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Authenticate via Storage Account Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "storage_account_key" + const: "storage_account_key" + enum: + - "storage_account_key" + type: "string" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + order: 3 + type: "string" + required: + - "azure_blob_storage_account_key" + - "auth_type" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + description: "The account's name of the Azure Blob Storage." 
+ examples: + - "airbyte5storage" + order: 3 + type: "string" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + description: "The name of the Azure blob storage container." + examples: + - "airbytetescontainername" + order: 4 + type: "string" + azure_blob_storage_endpoint: + title: "Endpoint Domain Name" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + order: 11 + type: "string" + required: + - "streams" + - "credentials" + - "azure_blob_storage_account_name" + - "azure_blob_storage_container_name" + source-close-com: + title: "Close.com Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Replication Start Date" + type: "string" + description: + "The start date to sync data; all data after this date will\ + \ be replicated. Leave blank to retrieve all the data available in the\ + \ account. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + sourceType: + title: "close-com" + const: "close-com" + enum: + - "close-com" + order: 0 + type: "string" + source-close-com-update: + title: "Close.com Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + start_date: + title: "Replication Start Date" + type: "string" + description: + "The start date to sync data; all data after this date will\ + \ be replicated. 
Leave blank to retrieve all the data available in the\ + \ account. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + source-zendesk-sunshine: + type: "object" + required: + - "start_date" + - "subdomain" + - "sourceType" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: + "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + sourceType: + title: "zendesk-sunshine" + const: "zendesk-sunshine" + enum: + - "zendesk-sunshine" + order: 0 + type: "string" + source-zendesk-sunshine-update: + type: "object" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: + "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + source-canny: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "You can find your secret API key in Your Canny Subdomain >\ + \ Settings > API" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "canny" + const: "canny" + enum: + - "canny" + order: 0 + type: "string" + source-canny-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "You can find your secret API key in Your Canny Subdomain >\ + \ Settings > API" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-exchange-rates: + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + - "sourceType" + properties: + start_date: + type: "string" + description: "Start getting data from that date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + access_key: + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + base: + type: "string" + description: + "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + ignore_weekends: + type: "boolean" + description: "Ignore weekends? (Exchanges don't run on weekends)" + default: true + sourceType: + title: "exchange-rates" + const: "exchange-rates" + enum: + - "exchange-rates" + order: 0 + type: "string" + source-exchange-rates-update: + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + properties: + start_date: + type: "string" + description: "Start getting data from that date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + access_key: + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + base: + type: "string" + description: + "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + ignore_weekends: + type: "boolean" + description: "Ignore weekends? (Exchanges don't run on weekends)" + default: true + source-woocommerce: + type: "object" + title: "Woocommerce Spec" + required: + - "api_key" + - "api_secret" + - "shop" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "Customer Key" + description: "Customer Key for API in WooCommerce shop" + airbyte_secret: true + x-speakeasy-param-sensitive: true + api_secret: + type: "string" + order: 1 + title: "Customer Secret" + description: "Customer Secret for API in WooCommerce shop" + airbyte_secret: true + x-speakeasy-param-sensitive: true + shop: + type: "string" + order: 2 + title: "Shop Name" + description: + "The name of the store. For https://EXAMPLE.com, the shop name\ + \ is 'EXAMPLE.com'." + start_date: + type: "string" + order: 3 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2021-01-01" + description: "The date you would like to replicate data from. 
Format: YYYY-MM-DD" + sourceType: + title: "woocommerce" + const: "woocommerce" + enum: + - "woocommerce" + order: 0 + type: "string" + source-woocommerce-update: + type: "object" + title: "Woocommerce Spec" + required: + - "api_key" + - "api_secret" + - "shop" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "Customer Key" + description: "Customer Key for API in WooCommerce shop" + airbyte_secret: true + api_secret: + type: "string" + order: 1 + title: "Customer Secret" + description: "Customer Secret for API in WooCommerce shop" + airbyte_secret: true + shop: + type: "string" + order: 2 + title: "Shop Name" + description: + "The name of the store. For https://EXAMPLE.com, the shop name\ + \ is 'EXAMPLE.com'." + start_date: + type: "string" + order: 3 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2021-01-01" + description: "The date you would like to replicate data from. Format: YYYY-MM-DD" + source-linkedin-pages: + type: "object" + required: + - "org_id" + - "sourceType" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the LinkedIn developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_secret: + type: "string" + title: "Client secret" + description: "The client secret of the LinkedIn developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access token" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + title: "Authentication" + org_id: + type: "string" + order: 1 + title: "Organization ID" + examples: + - "123456789" + description: "Specify the Organization ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + default: "2023-01-01T00:00:00Z" + description: + "Start date for getting metrics per time period. Must be atmost\ + \ 12 months before the request date (UTC) and atleast 2 days prior to\ + \ the request date (UTC). See https://bit.ly/linkedin-pages-date-rules\ + \ {{ \"\\n\" }} {{ response.errorDetails }}" + time_granularity_type: + enum: + - "DAY" + - "MONTH" + type: "string" + order: 3 + title: "Time Granularity Type" + default: "DAY" + description: + "Granularity of the statistics for metrics per time period.\ + \ Must be either \"DAY\" or \"MONTH\"" + sourceType: + title: "linkedin-pages" + const: "linkedin-pages" + enum: + - "linkedin-pages" + order: 0 + type: "string" + source-linkedin-pages-update: + type: "object" + required: + - "org_id" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the LinkedIn developer application." 
+ airbyte_secret: true + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_secret: + type: "string" + title: "Client secret" + description: "The client secret of the LinkedIn developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + - type: "object" + title: "Access token" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + order: 0 + title: "Authentication" + org_id: + type: "string" + order: 1 + title: "Organization ID" + examples: + - "123456789" + description: "Specify the Organization ID" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + default: "2023-01-01T00:00:00Z" + description: + "Start date for getting metrics per time period. Must be atmost\ + \ 12 months before the request date (UTC) and atleast 2 days prior to\ + \ the request date (UTC). 
See https://bit.ly/linkedin-pages-date-rules\ + \ {{ \"\\n\" }} {{ response.errorDetails }}" + time_granularity_type: + enum: + - "DAY" + - "MONTH" + type: "string" + order: 3 + title: "Time Granularity Type" + default: "DAY" + description: + "Granularity of the statistics for metrics per time period.\ + \ Must be either \"DAY\" or \"MONTH\"" + source-planhat: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Your Planhat API Access Token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "planhat" + const: "planhat" + enum: + - "planhat" + order: 0 + type: "string" + source-planhat-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Your Planhat API Access Token" + order: 0 + title: "API Token" + airbyte_secret: true + source-whisky-hunter: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "whisky-hunter" + const: "whisky-hunter" + enum: + - "whisky-hunter" + order: 0 + type: "string" + source-whisky-hunter-update: + type: "object" + required: [] + properties: {} + source-tvmaze-schedule: + type: "object" + required: + - "start_date" + - "domestic_schedule_country_code" + - "sourceType" + properties: + start_date: + type: "string" + description: "Start date for TV schedule retrieval. May be in the future." + order: 0 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: + "End date for TV schedule retrieval. May be in the future.\ + \ Optional.\n" + order: 1 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + domestic_schedule_country_code: + type: "string" + description: "Country code for domestic TV schedule retrieval." + examples: + - "US" + - "GB" + order: 2 + web_schedule_country_code: + type: "string" + description: + "ISO 3166-1 country code for web TV schedule retrieval. 
Leave\ + \ blank for\nall countries plus global web channels (e.g. Netflix). Alternatively,\n\ + set to 'global' for just global web channels.\n" + examples: + - "US" + - "GB" + - "global" + order: 3 + sourceType: + title: "tvmaze-schedule" + const: "tvmaze-schedule" + enum: + - "tvmaze-schedule" + order: 0 + type: "string" + source-tvmaze-schedule-update: + type: "object" + required: + - "start_date" + - "domestic_schedule_country_code" + properties: + start_date: + type: "string" + description: "Start date for TV schedule retrieval. May be in the future." + order: 0 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: + "End date for TV schedule retrieval. May be in the future.\ + \ Optional.\n" + order: 1 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + domestic_schedule_country_code: + type: "string" + description: "Country code for domestic TV schedule retrieval." + examples: + - "US" + - "GB" + order: 2 + web_schedule_country_code: + type: "string" + description: + "ISO 3166-1 country code for web TV schedule retrieval. Leave\ + \ blank for\nall countries plus global web channels (e.g. Netflix). Alternatively,\n\ + set to 'global' for just global web channels.\n" + examples: + - "US" + - "GB" + - "global" + order: 3 + source-salesloft: + type: "object" + required: + - "credentials" + - "start_date" + - "sourceType" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "Authenticate via OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "auth_type" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Salesloft developer application." + access_token: + type: "string" + description: "Access Token for making authenticated requests." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Salesloft developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining a new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + format: "date-time" + description: "The date-time when the access token should be refreshed." + - type: "object" + title: "Authenticate via API Key" + required: + - "api_key" + - "auth_type" + properties: + api_key: + type: "string" + title: "API Key" + description: + "API Key for making authenticated requests. More instruction\ + \ on how to find this value in our docs" + airbyte_secret: true + x-speakeasy-param-sensitive: true + auth_type: + type: "string" + const: "api_key" + enum: + - "api_key" + order: 0 + title: "Credentials" + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + examples: + - "2020-11-16T00:00:00Z" + description: + "The date from which you'd like to replicate data for Salesloft\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ sourceType: + title: "salesloft" + const: "salesloft" + enum: + - "salesloft" + order: 0 + type: "string" + source-salesloft-update: + type: "object" + required: + - "credentials" + - "start_date" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "Authenticate via OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "auth_type" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Salesloft developer application." + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Salesloft developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining a new access token." + airbyte_secret: true + token_expiry_date: + type: "string" + format: "date-time" + description: "The date-time when the access token should be refreshed." + - type: "object" + title: "Authenticate via API Key" + required: + - "api_key" + - "auth_type" + properties: + api_key: + type: "string" + title: "API Key" + description: + "API Key for making authenticated requests. More instruction\ + \ on how to find this value in our docs" + airbyte_secret: true + auth_type: + type: "string" + const: "api_key" + enum: + - "api_key" + order: 0 + title: "Credentials" + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + examples: + - "2020-11-16T00:00:00Z" + description: + "The date from which you'd like to replicate data for Salesloft\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ source-shortio: + title: "Shortio Spec" + type: "object" + required: + - "domain_id" + - "secret_key" + - "start_date" + - "sourceType" + properties: + domain_id: + type: "string" + desciprtion: "Short.io Domain ID" + title: "Domain ID" + airbyte_secret: false + x-speakeasy-param-sensitive: true + secret_key: + type: "string" + title: "Secret Key" + description: "Short.io Secret Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-07-30T03:43:59.244Z" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "shortio" + const: "shortio" + enum: + - "shortio" + order: 0 + type: "string" + source-shortio-update: + title: "Shortio Spec" + type: "object" + required: + - "domain_id" + - "secret_key" + - "start_date" + properties: + domain_id: + type: "string" + desciprtion: "Short.io Domain ID" + title: "Domain ID" + airbyte_secret: false + secret_key: + type: "string" + title: "Secret Key" + description: "Short.io Secret Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2023-07-30T03:43:59.244Z" + airbyte_secret: false + source-instatus: + title: "Instatus Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Instatus REST API key" + x-speakeasy-param-sensitive: true + sourceType: + title: "instatus" + const: "instatus" + enum: + - "instatus" + order: 0 + type: "string" + source-instatus-update: + title: "Instatus Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Instatus REST API key" + source-yandex-metrica: + title: "Yandex Metrica Spec" + type: "object" + required: + - "auth_token" + - "counter_id" + - "start_date" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication Token" + description: "Your Yandex Metrica API access token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + counter_id: + type: "string" + title: "Counter ID" + description: "Counter ID" + pattern: "^[0-9]+$" + order: 1 + start_date: + title: "Start Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\"." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 2 + end_date: + title: "End Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\". If not provided will sync till most recent date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 3 + sourceType: + title: "yandex-metrica" + const: "yandex-metrica" + enum: + - "yandex-metrica" + order: 0 + type: "string" + source-yandex-metrica-update: + title: "Yandex Metrica Spec" + type: "object" + required: + - "auth_token" + - "counter_id" + - "start_date" + properties: + auth_token: + type: "string" + title: "Authentication Token" + description: "Your Yandex Metrica API access token" + airbyte_secret: true + order: 0 + counter_id: + type: "string" + title: "Counter ID" + description: "Counter ID" + pattern: "^[0-9]+$" + order: 1 + start_date: + title: "Start Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\"." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 2 + end_date: + title: "End Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\". If not provided will sync till most recent date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 3 + source-vwo: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "vwo" + const: "vwo" + enum: + - "vwo" + order: 0 + type: "string" + source-vwo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-aircall: + type: "object" + required: + - "api_id" + - "api_token" + - "start_date" + - "sourceType" + properties: + api_id: + type: "string" + description: "App ID found at settings https://dashboard.aircall.io/integrations/api-keys" + title: "API ID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + api_token: + type: "string" + description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)" + title: "API Token" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." 
+ title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + sourceType: + title: "aircall" + const: "aircall" + enum: + - "aircall" + order: 0 + type: "string" + source-aircall-update: + type: "object" + required: + - "api_id" + - "api_token" + - "start_date" + properties: + api_id: + type: "string" + description: "App ID found at settings https://dashboard.aircall.io/integrations/api-keys" + title: "API ID" + airbyte_secret: true + order: 0 + api_token: + type: "string" + description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)" + title: "API Token" + airbyte_secret: true + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." + title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + source-clickup-api: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + include_closed_tasks: + type: "boolean" + description: + "Include or exclude closed tasks. By default, they are excluded.\ + \ See here." + order: 5 + title: "Include Closed Tasks" + default: false + sourceType: + title: "clickup-api" + const: "clickup-api" + enum: + - "clickup-api" + order: 0 + type: "string" + source-clickup-api-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." 
+ order: 0 + airbyte_secret: true + include_closed_tasks: + type: "boolean" + description: + "Include or exclude closed tasks. By default, they are excluded.\ + \ See here." + order: 5 + title: "Include Closed Tasks" + default: false + source-ezofficeinventory: + type: "object" + required: + - "api_key" + - "subdomain" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your EZOfficeInventory Access Token. API Access is disabled\ + \ by default. Enable API Access in Settings > Integrations > API Integration\ + \ and click on Update to generate a new access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: + "The company name used in signup, also visible in the URL when\ + \ logged in." + name: "subdomain" + order: 1 + title: "Subdomain" + airbyte_secret: false + x-speakeasy-param-sensitive: true + start_date: + type: "string" + description: + "Earliest date you want to sync historical streams (inventory_histories,\ + \ asset_histories, asset_stock_histories) from" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "ezofficeinventory" + const: "ezofficeinventory" + enum: + - "ezofficeinventory" + order: 0 + type: "string" + source-ezofficeinventory-update: + type: "object" + required: + - "api_key" + - "subdomain" + - "start_date" + properties: + api_key: + type: "string" + description: + "Your EZOfficeInventory Access Token. API Access is disabled\ + \ by default. Enable API Access in Settings > Integrations > API Integration\ + \ and click on Update to generate a new access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: + "The company name used in signup, also visible in the URL when\ + \ logged in." 
+ name: "subdomain" + order: 1 + title: "Subdomain" + airbyte_secret: false + start_date: + type: "string" + description: + "Earliest date you want to sync historical streams (inventory_histories,\ + \ asset_histories, asset_stock_histories) from" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-snapchat-marketing: + title: "Snapchat Marketing Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + description: + "Date in the format 2022-01-01. Any data before this date will\ + \ not be replicated." + examples: + - "2022-01-01" + default: "2022-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "Date in the format 2017-01-25. Any data after this date will\ + \ not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-30" + order: 4 + format: "date" + action_report_time: + type: "string" + enum: + - "conversion" + - "impression" + title: "Action Report Time" + description: "Specifies the principle for conversion reporting." 
+ default: "conversion" + order: 5 + swipe_up_attribution_window: + type: "string" + title: "Swipe Up Attribution Window" + description: "Attribution window for swipe ups." + enum: + - "1_DAY" + - "7_DAY" + - "28_DAY" + default: "28_DAY" + order: 6 + view_attribution_window: + type: "string" + title: "View Attribution Window" + description: "Attribution window for views." + enum: + - "1_HOUR" + - "3_HOUR" + - "6_HOUR" + - "1_DAY" + - "7_DAY" + default: "1_DAY" + order: 7 + sourceType: + title: "snapchat-marketing" + const: "snapchat-marketing" + enum: + - "snapchat-marketing" + order: 0 + type: "string" + source-snapchat-marketing-update: + title: "Snapchat Marketing Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 2 + start_date: + title: "Start Date" + type: "string" + description: + "Date in the format 2022-01-01. Any data before this date will\ + \ not be replicated." + examples: + - "2022-01-01" + default: "2022-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "Date in the format 2017-01-25. Any data after this date will\ + \ not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-30" + order: 4 + format: "date" + action_report_time: + type: "string" + enum: + - "conversion" + - "impression" + title: "Action Report Time" + description: "Specifies the principle for conversion reporting." 
+ default: "conversion" + order: 5 + swipe_up_attribution_window: + type: "string" + title: "Swipe Up Attribution Window" + description: "Attribution window for swipe ups." + enum: + - "1_DAY" + - "7_DAY" + - "28_DAY" + default: "28_DAY" + order: 6 + view_attribution_window: + type: "string" + title: "View Attribution Window" + description: "Attribution window for views." + enum: + - "1_HOUR" + - "3_HOUR" + - "6_HOUR" + - "1_DAY" + - "7_DAY" + default: "1_DAY" + order: 7 + source-gitlab: + title: "Source Gitlab Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for GitLab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\ + \ will be replicated. All data generated after this date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + api_url: + type: "string" + examples: + - "gitlab.com" + - "https://gitlab.com" + - "https://gitlab.company.org" + title: "API URL" + default: "gitlab.com" + description: "Please enter your basic URL from GitLab instance." + order: 2 + groups_list: + type: "array" + items: + type: "string" + examples: + - "airbyte.io" + title: "Groups" + description: "List of groups. e.g. airbyte.io." + order: 3 + projects_list: + type: "array" + items: + type: "string" + title: "Projects" + examples: + - "airbyte.io/documentation" + description: + "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab." 
+ order: 4 + sourceType: + title: "gitlab" + const: "gitlab" + enum: + - "gitlab" + order: 0 + type: "string" + source-gitlab-update: + title: "Source Gitlab Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for GitLab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\ + \ will be replicated. All data generated after this date will be replicated." 
+ examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + api_url: + type: "string" + examples: + - "gitlab.com" + - "https://gitlab.com" + - "https://gitlab.company.org" + title: "API URL" + default: "gitlab.com" + description: "Please enter your basic URL from GitLab instance." + order: 2 + groups_list: + type: "array" + items: + type: "string" + examples: + - "airbyte.io" + title: "Groups" + description: "List of groups. e.g. airbyte.io." + order: 3 + projects_list: + type: "array" + items: + type: "string" + title: "Projects" + examples: + - "airbyte.io/documentation" + description: + "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab." + order: 4 + source-launchdarkly: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + title: "Access token" + description: + "Your Access token. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "launchdarkly" + const: "launchdarkly" + enum: + - "launchdarkly" + order: 0 + type: "string" + source-launchdarkly-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + title: "Access token" + description: + "Your Access token. See here." + airbyte_secret: true + order: 0 + source-snowflake: + title: "Snowflake Source Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Key Pair Authentication" + type: "object" + order: 1 + required: + - "username" + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + order: 0 + enum: + - "Key Pair Authentication" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 2 + properties: + auth_type: + type: "string" + const: "username/password" + order: 0 + enum: + - "username/password" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." 
+ examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + x-speakeasy-param-sensitive: true + order: 0 + host: + description: + "The host domain of the snowflake instance (must include the\ + \ account, region, cloud environment, and end with snowflakecomputing.com)." + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + type: "string" + title: "Account Name" + order: 1 + role: + description: "The role you created for Airbyte to access Snowflake." + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 2 + warehouse: + description: "The warehouse you created for Airbyte to access data." + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 3 + database: + description: "The database you created for Airbyte to access data." + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 4 + schema: + description: + "The source Snowflake schema tables. Leave empty to access\ + \ tables from multiple schemas." + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Schema" + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + sourceType: + title: "snowflake" + const: "snowflake" + enum: + - "snowflake" + order: 0 + type: "string" + source-snowflake-update: + title: "Snowflake Source Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + - title: "Key Pair Authentication" + type: "object" + order: 1 + required: + - "username" + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + order: 0 + enum: + - "Key Pair Authentication" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." 
+ multiline: true + airbyte_secret: true + order: 2 + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + order: 3 + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 2 + properties: + auth_type: + type: "string" + const: "username/password" + order: 0 + enum: + - "username/password" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + order: 0 + host: + description: + "The host domain of the snowflake instance (must include the\ + \ account, region, cloud environment, and end with snowflakecomputing.com)." + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + type: "string" + title: "Account Name" + order: 1 + role: + description: "The role you created for Airbyte to access Snowflake." + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 2 + warehouse: + description: "The warehouse you created for Airbyte to access data." + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 3 + database: + description: "The database you created for Airbyte to access data." + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 4 + schema: + description: + "The source Snowflake schema tables. Leave empty to access\ + \ tables from multiple schemas." + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Schema" + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + source-auth0: + title: "Auth0 Management API Spec" + type: "object" + required: + - "base_url" + - "credentials" + - "sourceType" + properties: + base_url: + type: "string" + title: "Base URL" + examples: + - "https://dev-yourOrg.us.auth0.com/" + description: + "The Authentication API is served over HTTPS. All URLs referenced\ + \ in the documentation have the following base `https://YOUR_DOMAIN`" + credentials: + title: "Authentication Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2 Confidential Application" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "audience" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_confidential_application" + order: 0 + enum: + - "oauth2_confidential_application" + client_id: + title: "Client ID" + description: + "Your application's Client ID. You can find this value\ + \ on the application's\ + \ settings tab after you login the admin portal." + type: "string" + examples: + - "Client_ID" + client_secret: + title: "Client Secret" + description: + "Your application's Client Secret. You can find this\ + \ value on the application's settings tab after you login the admin portal." + type: "string" + examples: + - "Client_Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + audience: + title: "Audience" + description: + "The audience for the token, which is your API. 
You can\ + \ find this in the Identifier field on your API's settings tab" + type: "string" + examples: + - "https://dev-yourOrg.us.auth0.com/api/v2/" + - type: "object" + title: "OAuth2 Access Token" + required: + - "access_token" + - "auth_type" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_access_token" + examples: + - "oauth2_access_token" + order: 0 + enum: + - "oauth2_access_token" + access_token: + title: "OAuth2 Access Token" + description: + "Also called API Access Token The access token used to call the Auth0 Management\ + \ API Token. It's a JWT that contains specific grant permissions\ + \ knowns as scopes." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "auth0" + const: "auth0" + enum: + - "auth0" + order: 0 + type: "string" + source-auth0-update: + title: "Auth0 Management API Spec" + type: "object" + required: + - "base_url" + - "credentials" + properties: + base_url: + type: "string" + title: "Base URL" + examples: + - "https://dev-yourOrg.us.auth0.com/" + description: + "The Authentication API is served over HTTPS. 
All URLs referenced\ + \ in the documentation have the following base `https://YOUR_DOMAIN`" + credentials: + title: "Authentication Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2 Confidential Application" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "audience" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_confidential_application" + order: 0 + enum: + - "oauth2_confidential_application" + client_id: + title: "Client ID" + description: + "Your application's Client ID. You can find this value\ + \ on the application's\ + \ settings tab after you login the admin portal." + type: "string" + examples: + - "Client_ID" + client_secret: + title: "Client Secret" + description: + "Your application's Client Secret. You can find this\ + \ value on the application's settings tab after you login the admin portal." + type: "string" + examples: + - "Client_Secret" + airbyte_secret: true + audience: + title: "Audience" + description: + "The audience for the token, which is your API. You can\ + \ find this in the Identifier field on your API's settings tab" + type: "string" + examples: + - "https://dev-yourOrg.us.auth0.com/api/v2/" + - type: "object" + title: "OAuth2 Access Token" + required: + - "access_token" + - "auth_type" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_access_token" + examples: + - "oauth2_access_token" + order: 0 + enum: + - "oauth2_access_token" + access_token: + title: "OAuth2 Access Token" + description: + "Also called API Access Token The access token used to call the Auth0 Management\ + \ API Token. It's a JWT that contains specific grant permissions\ + \ knowns as scopes." + type: "string" + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false + source-linnworks: + title: "Linnworks Spec" + type: "object" + required: + - "application_id" + - "application_secret" + - "token" + - "start_date" + - "sourceType" + properties: + application_id: + title: "Application ID." + description: "Linnworks Application ID" + type: "string" + application_secret: + title: "Application Secret" + description: "Linnworks Application Secret" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + token: + title: "API Token" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + type: "string" + format: "date-time" + sourceType: + title: "linnworks" + const: "linnworks" + enum: + - "linnworks" + order: 0 + type: "string" + source-linnworks-update: + title: "Linnworks Spec" + type: "object" + required: + - "application_id" + - "application_secret" + - "token" + - "start_date" + properties: + application_id: + title: "Application ID." + description: "Linnworks Application ID" + type: "string" + application_secret: + title: "Application Secret" + description: "Linnworks Application Secret" + type: "string" + airbyte_secret: true + token: + title: "API Token" + type: "string" + airbyte_secret: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + type: "string" + format: "date-time" + source-microsoft-sharepoint: + title: "Microsoft SharePoint Source Spec" + description: + "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the SharePoint API." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access,\ + \ 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to\ + \ search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." + order: 4 + type: "string" + sourceType: + title: "microsoft-sharepoint" + const: "microsoft-sharepoint" + enum: + - "microsoft-sharepoint" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "sourceType" + source-microsoft-sharepoint-update: + title: "Microsoft SharePoint Source Spec" + description: + "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the SharePoint API." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access,\ + \ 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to\ + \ search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." 
+ order: 4 + type: "string" + required: + - "streams" + - "credentials" + source-amazon-sqs: + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + - "sourceType" + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: + "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. 
" + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: + "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Mesage Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: + "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." + type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + x-speakeasy-param-sensitive: true + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + x-speakeasy-param-sensitive: true + sourceType: + title: "amazon-sqs" + const: "amazon-sqs" + enum: + - "amazon-sqs" + order: 0 + type: "string" + source-amazon-sqs-update: + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + 
- "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: + "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. " + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: + "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Mesage Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: + "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." 
+ type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + source-sonar-cloud: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + - "sourceType" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." + examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." + examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "sonar-cloud" + const: "sonar-cloud" + enum: + - "sonar-cloud" + order: 0 + type: "string" + source-sonar-cloud-update: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." 
+ examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." + examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + source-clockify: + type: "object" + required: + - "api_key" + - "workspace_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "You can get your api access_key here This API is Case Sensitive." + order: 0 + x-speakeasy-param-sensitive: true + api_url: + type: "string" + title: "API Url" + description: + "The URL for the Clockify API. This should only need to be\ + \ modified if connecting to an enterprise version of Clockify." + default: "https://api.clockify.me" + order: 1 + workspace_id: + type: "string" + title: "Workspace Id" + description: "WorkSpace Id" + order: 2 + sourceType: + title: "clockify" + const: "clockify" + enum: + - "clockify" + order: 0 + type: "string" + source-clockify-update: + type: "object" + required: + - "api_key" + - "workspace_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "You can get your api access_key here This API is Case Sensitive." + order: 0 + api_url: + type: "string" + title: "API Url" + description: + "The URL for the Clockify API. 
This should only need to be\ + \ modified if connecting to an enterprise version of Clockify." + default: "https://api.clockify.me" + order: 1 + workspace_id: + type: "string" + title: "Workspace Id" + description: "WorkSpace Id" + order: 2 + source-marketo: + title: "Source Marketo Spec" + type: "object" + required: + - "domain_url" + - "client_id" + - "client_secret" + - "start_date" + - "sourceType" + properties: + domain_url: + title: "Domain URL" + type: "string" + order: 3 + description: + "Your Marketo Base URL. See the docs for info on how to obtain this." + examples: + - "https://000-AAA-000.mktorest.com" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Marketo developer application. See the\ + \ docs for info on how to obtain this." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Marketo developer application. See\ + \ the\ + \ docs for info on how to obtain this." + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + order: 2 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-09-25T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + sourceType: + title: "marketo" + const: "marketo" + enum: + - "marketo" + order: 0 + type: "string" + source-marketo-update: + title: "Source Marketo Spec" + type: "object" + required: + - "domain_url" + - "client_id" + - "client_secret" + - "start_date" + properties: + domain_url: + title: "Domain URL" + type: "string" + order: 3 + description: + "Your Marketo Base URL. See the docs for info on how to obtain this." 
+ examples: + - "https://000-AAA-000.mktorest.com" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Marketo developer application. See the\ + \ docs for info on how to obtain this." + order: 0 + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Marketo developer application. See\ + \ the\ + \ docs for info on how to obtain this." + order: 1 + airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + order: 2 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-09-25T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + source-pocket: + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + - "sourceType" + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: + "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." + order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." 
+ order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: + "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + sourceType: + title: "pocket" + const: "pocket" + enum: + - "pocket" + order: 0 + type: "string" + source-pocket-update: + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: + "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." 
+ order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." + order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: + "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + source-productboard: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Your Productboard access token. See https://developer.productboard.com/reference/authentication\ + \ for steps to generate one." + name: "api_key" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "productboard" + const: "productboard" + enum: + - "productboard" + order: 0 + type: "string" + source-productboard-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Your Productboard access token. 
See https://developer.productboard.com/reference/authentication\ + \ for steps to generate one." + name: "api_key" + order: 0 + title: "Access Token" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-smartsheets: + title: "Smartsheets Source Spec" + type: "object" + required: + - "credentials" + - "spreadsheet_id" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Access Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token to use for accessing your data from\ + \ Smartsheets. 
This access token must be generated by a user with\ + \ at least read access to the data you'd like to replicate. Generate\ + \ an access token in the Smartsheets main menu by clicking Account\ + \ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + spreadsheet_id: + title: "Sheet ID" + description: + "The spreadsheet ID. Find it by opening the spreadsheet then\ + \ navigating to File > Properties" + type: "string" + order: 1 + metadata_fields: + title: "Metadata Fields" + type: "array" + items: + title: "Validenums" + enum: + - "sheetcreatedAt" + - "sheetid" + - "sheetmodifiedAt" + - "sheetname" + - "sheetpermalink" + - "sheetversion" + - "sheetaccess_level" + - "row_id" + - "row_access_level" + - "row_created_at" + - "row_created_by" + - "row_expanded" + - "row_modified_by" + - "row_parent_id" + - "row_permalink" + - "row_number" + - "row_version" + description: "A List of available columns which metadata can be pulled from." + order: 3 + sourceType: + title: "smartsheets" + const: "smartsheets" + enum: + - "smartsheets" + order: 0 + type: "string" + source-smartsheets-update: + title: "Smartsheets Source Spec" + type: "object" + required: + - "credentials" + - "spreadsheet_id" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." 
+ airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "API Access Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token to use for accessing your data from\ + \ Smartsheets. This access token must be generated by a user with\ + \ at least read access to the data you'd like to replicate. Generate\ + \ an access token in the Smartsheets main menu by clicking Account\ + \ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token." + airbyte_secret: true + spreadsheet_id: + title: "Sheet ID" + description: + "The spreadsheet ID. Find it by opening the spreadsheet then\ + \ navigating to File > Properties" + type: "string" + order: 1 + metadata_fields: + title: "Metadata Fields" + type: "array" + items: + title: "Validenums" + enum: + - "sheetcreatedAt" + - "sheetid" + - "sheetmodifiedAt" + - "sheetname" + - "sheetpermalink" + - "sheetversion" + - "sheetaccess_level" + - "row_id" + - "row_access_level" + - "row_created_at" + - "row_created_by" + - "row_expanded" + - "row_modified_by" + - "row_parent_id" + - "row_permalink" + - "row_number" + - "row_version" + description: "A List of available columns which metadata can be pulled from." + order: 3 + source-lob: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use for authentication. You can find your account's\ + \ API keys in your Dashboard Settings at https://dashboard.lob.com/settings/api-keys." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + sourceType: + title: "lob" + const: "lob" + enum: + - "lob" + order: 0 + type: "string" + source-lob-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use for authentication. You can find your account's\ + \ API keys in your Dashboard Settings at https://dashboard.lob.com/settings/api-keys." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + source-iterable: + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." 
+ examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + sourceType: + title: "iterable" + const: "iterable" + enum: + - "iterable" + order: 0 + type: "string" + source-iterable-update: + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + source-mysql: + title: "MySql Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "replication_method" + - "sourceType" + properties: + host: + description: "The host name of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port to connect to." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + description: "The database name." + title: "Database" + type: "string" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + always_show: true + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3). For\ + \ more information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. Read more in the docs." + type: "object" + order: 7 + oneOf: + - title: "preferred" + description: + "Automatically attempt SSL connection. If the MySQL server\ + \ does not support SSL, continue with a regular connection." + required: + - "mode" + properties: + mode: + type: "string" + const: "preferred" + order: 0 + enum: + - "preferred" + - title: "required" + description: + "Always connect with SSL. If the MySQL server doesn’t support\ + \ SSL, the connection will not be established. Certificate Authority\ + \ (CA) and Hostname are not verified." + required: + - "mode" + properties: + mode: + type: "string" + const: "required" + order: 0 + enum: + - "required" + - title: "Verify CA" + description: + "Always connect with SSL. Verifies CA, but allows connection\ + \ even if Hostname does not match." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_ca" + order: 0 + enum: + - "verify_ca" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Verify Identity" + description: "Always connect with SSL. Verify both CA and Hostname." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_identity" + order: 0 + enum: + - "verify_identity" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 8 + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Binary Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the MySQL binary log. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 1 + always_show: true + server_time_zone: + type: "string" + title: "Configured server timezone for the MySQL source (Advanced)" + description: + "Enter the configured MySQL server timezone. This should\ + \ only be done if the configured timezone in your MySQL instance\ + \ does not conform to IANNA standard." + order: 2 + always_show: true + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 3 + always_show: true + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 4 + always_show: true + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "mysql" + const: "mysql" + enum: + - "mysql" + order: 0 + type: "string" + source-mysql-update: + title: "MySql Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "replication_method" + properties: + host: + description: "The host name of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port to connect to." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + description: "The database name." + title: "Database" + type: "string" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + always_show: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For\ + \ more information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." 
+ type: "boolean" + default: true + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. Read more in the docs." + type: "object" + order: 7 + oneOf: + - title: "preferred" + description: + "Automatically attempt SSL connection. If the MySQL server\ + \ does not support SSL, continue with a regular connection." + required: + - "mode" + properties: + mode: + type: "string" + const: "preferred" + order: 0 + enum: + - "preferred" + - title: "required" + description: + "Always connect with SSL. If the MySQL server doesn’t support\ + \ SSL, the connection will not be established. Certificate Authority\ + \ (CA) and Hostname are not verified." + required: + - "mode" + properties: + mode: + type: "string" + const: "required" + order: 0 + enum: + - "required" + - title: "Verify CA" + description: + "Always connect with SSL. Verifies CA, but allows connection\ + \ even if Hostname does not match." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_ca" + order: 0 + enum: + - "verify_ca" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." 
+ airbyte_secret: true + order: 4 + - title: "Verify Identity" + description: "Always connect with SSL. Verify both CA and Hostname." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_identity" + order: 0 + enum: + - "verify_identity" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 8 + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Binary Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the MySQL binary log. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 1 + always_show: true + server_time_zone: + type: "string" + title: "Configured server timezone for the MySQL source (Advanced)" + description: + "Enter the configured MySQL server timezone. This should\ + \ only be done if the configured timezone in your MySQL instance\ + \ does not conform to IANNA standard." + order: 2 + always_show: true + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 3 + always_show: true + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 4 + always_show: true + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-rollbar: + type: "object" + required: + - "project_access_token" + - "start_date" + - "account_access_token" + - "sourceType" + properties: + project_access_token: + type: "string" + name: "api_key" + title: "Project Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + account_access_token: + type: "string" + title: "Account Access Token" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "rollbar" + const: "rollbar" + enum: + - "rollbar" + order: 0 + type: "string" + source-rollbar-update: + type: "object" + required: + - "project_access_token" + - "start_date" + - "account_access_token" + properties: + project_access_token: + type: "string" + name: "api_key" + title: "Project Access Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + account_access_token: + type: "string" + title: "Account Access Token" + airbyte_secret: true + order: 2 + source-emailoctopus: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "EmailOctopus API key" + description: + "EmailOctopus 
API Key. See the docs for information on how to generate this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "emailoctopus" + const: "emailoctopus" + enum: + - "emailoctopus" + order: 0 + type: "string" + source-emailoctopus-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "EmailOctopus API key" + description: + "EmailOctopus API Key. See the docs for information on how to generate this key." + airbyte_secret: true + order: 0 + source-railz: + title: "Railz Spec" + type: "object" + required: + - "client_id" + - "secret_key" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID (client_id)" + order: 0 + secret_key: + type: "string" + title: "Secret key" + description: "Secret key (secret_key)" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + description: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + sourceType: + title: "railz" + const: "railz" + enum: + - "railz" + order: 0 + type: "string" + source-railz-update: + title: "Railz Spec" + type: "object" + required: + - "client_id" + - "secret_key" + - "start_date" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID (client_id)" + order: 0 + secret_key: + type: "string" + title: "Secret key" + description: "Secret key (secret_key)" + order: 1 + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + description: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + source-orbit: + type: "object" + required: + - "api_token" + - "workspace" + - "sourceType" + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: + "Authorizes you to work with Orbit workspaces associated with\ + \ the token." 
+ order: 0 + x-speakeasy-param-sensitive: true + workspace: + type: "string" + title: "Workspace" + description: + "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + sourceType: + title: "orbit" + const: "orbit" + enum: + - "orbit" + order: 0 + type: "string" + source-orbit-update: + type: "object" + required: + - "api_token" + - "workspace" + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: + "Authorizes you to work with Orbit workspaces associated with\ + \ the token." + order: 0 + workspace: + type: "string" + title: "Workspace" + description: + "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + source-apify-dataset: + title: "Apify Dataset Spec" + type: "object" + required: + - "token" + - "dataset_id" + - "sourceType" + properties: + token: + type: "string" + title: "API token" + description: + "Personal API token of your Apify account. In Apify Console,\ + \ you can find your API token in the Settings section under the Integrations tab after you login. See\ + \ the Apify Docs for more information." + examples: + - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk" + airbyte_secret: true + x-speakeasy-param-sensitive: true + dataset_id: + type: "string" + title: "Dataset ID" + description: + "ID of the dataset you would like to load to Airbyte. In Apify\ + \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. 
See the Apify Docs\ + \ for more information." + examples: + - "rHuMdwm6xCFt6WiGU" + sourceType: + title: "apify-dataset" + const: "apify-dataset" + enum: + - "apify-dataset" + order: 0 + type: "string" + source-apify-dataset-update: + title: "Apify Dataset Spec" + type: "object" + required: + - "token" + - "dataset_id" + properties: + token: + type: "string" + title: "API token" + description: + "Personal API token of your Apify account. In Apify Console,\ + \ you can find your API token in the Settings section under the Integrations tab after you login. See\ + \ the Apify Docs for more information." + examples: + - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk" + airbyte_secret: true + dataset_id: + type: "string" + title: "Dataset ID" + description: + "ID of the dataset you would like to load to Airbyte. In Apify\ + \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs\ + \ for more information." + examples: + - "rHuMdwm6xCFt6WiGU" + source-confluence: + type: "object" + required: + - "email" + - "api_token" + - "domain_name" + - "sourceType" + properties: + email: + type: "string" + title: "Email" + description: "Your Confluence login email" + examples: + - "abc@example.com" + order: 0 + api_token: + type: "string" + title: "API Token" + description: + "Please follow the Jira confluence for generating an API token:\ + \ generating an API token." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + domain_name: + type: "string" + title: "Domain name" + description: "Your Confluence domain name" + order: 2 + sourceType: + title: "confluence" + const: "confluence" + enum: + - "confluence" + order: 0 + type: "string" + source-confluence-update: + type: "object" + required: + - "email" + - "api_token" + - "domain_name" + properties: + email: + type: "string" + title: "Email" + description: "Your Confluence login email" + examples: + - "abc@example.com" + order: 0 + api_token: + type: "string" + title: "API Token" + description: + "Please follow the Jira confluence for generating an API token:\ + \ generating an API token." + airbyte_secret: true + order: 1 + domain_name: + type: "string" + title: "Domain name" + description: "Your Confluence domain name" + order: 2 + source-coin-api: + title: "Coin API Spec" + type: "object" + required: + - "api_key" + - "environment" + - "symbol_id" + - "period" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + environment: + type: "string" + description: "The environment to use. Either sandbox or production.\n" + enum: + - "sandbox" + - "production" + default: "sandbox" + order: 1 + symbol_id: + type: "string" + description: + "The symbol ID to use. See the documentation for a list.\n\ + https://docs.coinapi.io/#list-all-symbols-get\n" + order: 2 + period: + type: "string" + description: "The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get" + examples: + - "5SEC" + - "2MTH" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: "The start date in ISO 8601 format." 
+ examples: + - "2019-01-01T00:00:00" + end_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: + "The end date in ISO 8601 format. If not supplied, data will\ + \ be returned\nfrom the start date to the current time, or when the count\ + \ of result\nelements reaches its limit.\n" + examples: + - "2019-01-01T00:00:00" + limit: + type: "integer" + description: + "The maximum number of elements to return. If not supplied,\ + \ the default\nis 100. For numbers larger than 100, each 100 items is\ + \ counted as one\nrequest for pricing purposes. Maximum value is 100000.\n" + minimum: 1 + maximum: 100000 + default: 100 + sourceType: + title: "coin-api" + const: "coin-api" + enum: + - "coin-api" + order: 0 + type: "string" + source-coin-api-update: + title: "Coin API Spec" + type: "object" + required: + - "api_key" + - "environment" + - "symbol_id" + - "period" + - "start_date" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + environment: + type: "string" + description: "The environment to use. Either sandbox or production.\n" + enum: + - "sandbox" + - "production" + default: "sandbox" + order: 1 + symbol_id: + type: "string" + description: + "The symbol ID to use. See the documentation for a list.\n\ + https://docs.coinapi.io/#list-all-symbols-get\n" + order: 2 + period: + type: "string" + description: "The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get" + examples: + - "5SEC" + - "2MTH" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: "The start date in ISO 8601 format." + examples: + - "2019-01-01T00:00:00" + end_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: + "The end date in ISO 8601 format. 
If not supplied, data will\ + \ be returned\nfrom the start date to the current time, or when the count\ + \ of result\nelements reaches its limit.\n" + examples: + - "2019-01-01T00:00:00" + limit: + type: "integer" + description: + "The maximum number of elements to return. If not supplied,\ + \ the default\nis 100. For numbers larger than 100, each 100 items is\ + \ counted as one\nrequest for pricing purposes. Maximum value is 100000.\n" + minimum: 1 + maximum: 100000 + default: 100 + source-orb: + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Orb API Key" + description: "Orb API Key, issued from the Orb admin console." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at before this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2022-03-01T00:00:00Z" + order: 1 + end_date: + type: "string" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at after this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2024-03-01T00:00:00Z" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N days. By default, updated objects that are\ + \ not newly created are not incrementally synced." 
+ order: 3 + string_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (string values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 4 + numeric_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (numeric values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 5 + subscription_usage_grouping_key: + type: "string" + title: "Subscription usage grouping key (string value)" + description: "Property key name to group subscription usage by." + order: 6 + plan_id: + type: "string" + title: "Orb Plan ID for Subscription Usage (string value)" + description: + "Orb Plan ID to filter subscriptions that should have usage\ + \ fetched." + order: 7 + sourceType: + title: "orb" + const: "orb" + enum: + - "orb" + order: 0 + type: "string" + source-orb-update: + type: "object" + required: + - "start_date" + - "api_key" + properties: + api_key: + type: "string" + title: "Orb API Key" + description: "Orb API Key, issued from the Orb admin console." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at before this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2022-03-01T00:00:00Z" + order: 1 + end_date: + type: "string" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at after this data will not be synced. 
For Subscription\ + \ Usage, this becomes the `timeframe_end` API parameter." + examples: + - "2024-03-01T00:00:00Z" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N days. By default, updated objects that are\ + \ not newly created are not incrementally synced." + order: 3 + string_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (string values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 4 + numeric_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (numeric values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 5 + subscription_usage_grouping_key: + type: "string" + title: "Subscription usage grouping key (string value)" + description: "Property key name to group subscription usage by." + order: 6 + plan_id: + type: "string" + title: "Orb Plan ID for Subscription Usage (string value)" + description: + "Orb Plan ID to filter subscriptions that should have usage\ + \ fetched." 
+ order: 7 + source-sentry: + title: "Sentry Spec" + type: "object" + required: + - "auth_token" + - "organization" + - "project" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication Tokens" + description: + "Log into Sentry and then create authentication tokens.For self-hosted, you can find or create\ + \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\"" + airbyte_secret: true + x-speakeasy-param-sensitive: true + hostname: + type: "string" + title: "Host Name" + description: + "Host name of Sentry API server.For self-hosted, specify your\ + \ host name here. Otherwise, leave it empty." + default: "sentry.io" + organization: + type: "string" + title: "Organization" + description: "The slug of the organization the groups belong to." + project: + type: "string" + title: "Project" + description: "The name (slug) of the Project you want to sync." + discover_fields: + type: "array" + item: "string" + title: "Discover Event Fields" + description: "Fields to retrieve when fetching discover events" + sourceType: + title: "sentry" + const: "sentry" + enum: + - "sentry" + order: 0 + type: "string" + source-sentry-update: + title: "Sentry Spec" + type: "object" + required: + - "auth_token" + - "organization" + - "project" + properties: + auth_token: + type: "string" + title: "Authentication Tokens" + description: + "Log into Sentry and then create authentication tokens.For self-hosted, you can find or create\ + \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\"" + airbyte_secret: true + hostname: + type: "string" + title: "Host Name" + description: + "Host name of Sentry API server.For self-hosted, specify your\ + \ host name here. Otherwise, leave it empty." + default: "sentry.io" + organization: + type: "string" + title: "Organization" + description: "The slug of the organization the groups belong to." 
+ project: + type: "string" + title: "Project" + description: "The name (slug) of the Project you want to sync." + discover_fields: + type: "array" + item: "string" + title: "Discover Event Fields" + description: "Fields to retrieve when fetching discover events" + source-notion: + title: "Notion Source Spec" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\ + \ During incremental sync, any data generated before this date will not\ + \ be replicated. If left blank, the start date will be set to 2 years\ + \ before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-11-16T00:00:00.000Z" + type: "string" + format: "date-time" + credentials: + title: "Authentication Method" + description: + "Choose either OAuth (recommended for Airbyte Cloud) or Access\ + \ Token. See our docs\ + \ for more information." + type: "object" + order: 1 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "The Access Token received by completing the OAuth flow\ + \ for your Notion integration. See our docs\ + \ for more information." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access Token" + required: + - "auth_type" + - "token" + properties: + auth_type: + type: "string" + const: "token" + enum: + - "token" + token: + title: "Access Token" + description: + "The Access Token for your private Notion integration.\ + \ See the docs\ + \ for more information on how to obtain this token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "notion" + const: "notion" + enum: + - "notion" + order: 0 + type: "string" + source-notion-update: + title: "Notion Source Spec" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\ + \ During incremental sync, any data generated before this date will not\ + \ be replicated. If left blank, the start date will be set to 2 years\ + \ before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-11-16T00:00:00.000Z" + type: "string" + format: "date-time" + credentials: + title: "Authentication Method" + description: + "Choose either OAuth (recommended for Airbyte Cloud) or Access\ + \ Token. See our docs\ + \ for more information." + type: "object" + order: 1 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." 
+ airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "The Access Token received by completing the OAuth flow\ + \ for your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + - type: "object" + title: "Access Token" + required: + - "auth_type" + - "token" + properties: + auth_type: + type: "string" + const: "token" + enum: + - "token" + token: + title: "Access Token" + description: + "The Access Token for your private Notion integration.\ + \ See the docs\ + \ for more information on how to obtain this token." + type: "string" + airbyte_secret: true + source-trustpilot: + title: "Trustpilot Spec" + type: "object" + required: + - "credentials" + - "business_units" + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth 2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "API key" + description: + "The API key of the Trustpilot API application. (represents\ + \ the OAuth Client ID)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Secret" + description: + "The Secret of the Trustpilot API application. (represents\ + \ the OAuth Client Secret)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + title: "Token expiry date time" + description: "The date-time when the access token should be refreshed." 
+ format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access_token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Key" + description: + "The API key authentication method gives you access to only\ + \ the streams which are part of the Public API. When you want to get\ + \ streams available via the Consumer API (e.g. the private reviews)\ + \ you need to use authentication method OAuth 2.0." + required: + - "client_id" + properties: + auth_type: + type: "string" + const: "apikey" + enum: + - "apikey" + client_id: + type: "string" + title: "API key" + description: "The API key of the Trustpilot API application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + business_units: + type: "array" + items: + type: "string" + title: "Business Unit names" + description: + "The names of business units which shall be synchronized. Some\ + \ streams e.g. configured_business_units or private_reviews use this configuration." + examples: + - "mydomain.com" + - "www.mydomain.com" + start_date: + type: "string" + title: "Start Date" + description: + "For streams with sync. 
method incremental the start date time\ + \ to be used" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "%Y-%m-%dT%H:%M:%SZ" + sourceType: + title: "trustpilot" + const: "trustpilot" + enum: + - "trustpilot" + order: 0 + type: "string" + source-trustpilot-update: + title: "Trustpilot Spec" + type: "object" + required: + - "credentials" + - "business_units" + - "start_date" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth 2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "API key" + description: + "The API key of the Trustpilot API application. (represents\ + \ the OAuth Client ID)" + airbyte_secret: true + client_secret: + type: "string" + title: "Secret" + description: + "The Secret of the Trustpilot API application. (represents\ + \ the OAuth Client Secret)" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + title: "Token expiry date time" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - type: "object" + title: "API Key" + description: + "The API key authentication method gives you access to only\ + \ the streams which are part of the Public API. When you want to get\ + \ streams available via the Consumer API (e.g. the private reviews)\ + \ you need to use authentication method OAuth 2.0." 
+ required: + - "client_id" + properties: + auth_type: + type: "string" + const: "apikey" + enum: + - "apikey" + client_id: + type: "string" + title: "API key" + description: "The API key of the Trustpilot API application." + airbyte_secret: true + business_units: + type: "array" + items: + type: "string" + title: "Business Unit names" + description: + "The names of business units which shall be synchronized. Some\ + \ streams e.g. configured_business_units or private_reviews use this configuration." + examples: + - "mydomain.com" + - "www.mydomain.com" + start_date: + type: "string" + title: "Start Date" + description: + "For streams with sync. method incremental the start date time\ + \ to be used" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "%Y-%m-%dT%H:%M:%SZ" + source-google-webfonts: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + order: 0 + x-speakeasy-param-sensitive: true + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + order: 1 + prettyPrint: + type: "string" + description: "Optional, boolean type" + order: 2 + sort: + type: "string" + description: "Optional, to find how to sort" + order: 3 + sourceType: + title: "google-webfonts" + const: "google-webfonts" + enum: + - "google-webfonts" + order: 0 + type: "string" + source-google-webfonts-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + order: 0 + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + order: 1 + prettyPrint: + type: 
"string" + description: "Optional, boolean type" + order: 2 + sort: + type: "string" + description: "Optional, to find how to sort" + order: 3 + source-pypi: + type: "object" + required: + - "project_name" + - "sourceType" + properties: + version: + type: "string" + title: "Package Version" + description: + "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + order: 1 + project_name: + type: "string" + title: "PyPI Package" + description: + "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + order: 0 + sourceType: + title: "pypi" + const: "pypi" + enum: + - "pypi" + order: 0 + type: "string" + source-pypi-update: + type: "object" + required: + - "project_name" + properties: + version: + type: "string" + title: "Package Version" + description: + "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + order: 1 + project_name: + type: "string" + title: "PyPI Package" + description: + "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + order: 0 + source-slack: + title: "Slack Spec" + type: "object" + required: + - "start_date" + - "lookback_window" + - "join_channels" + - "sourceType" + properties: + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + title: "Start Date" + format: "date-time" + lookback_window: + type: "integer" + title: "Threads Lookback window (Days)" + description: + "How far into the past to look for messages in threads, default\ + \ is 0 days" + examples: + - 7 + - 14 + minimum: 0 + default: 0 + maximum: 365 + join_channels: + type: "boolean" + default: true + title: "Join all channels" + description: + "Whether to join all channels or to sync data only from channels\ + \ the bot is already in. If false, you'll need to manually add the bot\ + \ to all the channels from which you'd like to sync messages. " + include_private_channels: + type: "boolean" + default: false + title: "Include private channels" + description: + "Whether to read information from private channels that the\ + \ bot is already in. If false, only public channels will be read. If\ + \ true, the bot must be manually added to private channels. " + channel_filter: + type: "array" + default: [] + items: + type: "string" + minLength: 0 + title: "Channel name filter" + description: + "A channel name list (without leading '#' char) which limit\ + \ the channels from which you'd like to sync. Empty list means no filter." + examples: + - "channel_one" + - "channel_two" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate into Slack" + type: "object" + oneOf: + - type: "object" + title: "Sign in via Slack (OAuth)" + required: + - "option_title" + - "client_id" + - "client_secret" + - "access_token" + properties: + option_title: + type: "string" + const: "Default OAuth2.0 authorization" + enum: + - "Default OAuth2.0 authorization" + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access token" + description: + "Slack access_token. See our docs if you need help generating the token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + - type: "object" + title: "API Token" + required: + - "option_title" + - "api_token" + properties: + option_title: + type: "string" + const: "API Token Credentials" + enum: + - "API Token Credentials" + api_token: + type: "string" + title: "API Token" + description: + "A Slack bot token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 1 + sourceType: + title: "slack" + const: "slack" + enum: + - "slack" + order: 0 + type: "string" + source-slack-update: + title: "Slack Spec" + type: "object" + required: + - "start_date" + - "lookback_window" + - "join_channels" + properties: + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + title: "Start Date" + format: "date-time" + lookback_window: + type: "integer" + title: "Threads Lookback window (Days)" + description: + "How far into the past to look for messages in threads, default\ + \ is 0 days" + examples: + - 7 + - 14 + minimum: 0 + default: 0 + maximum: 365 + join_channels: + type: "boolean" + default: true + title: "Join all channels" + description: + "Whether to join all channels or to sync data only from channels\ + \ the bot is already in. If false, you'll need to manually add the bot\ + \ to all the channels from which you'd like to sync messages. " + include_private_channels: + type: "boolean" + default: false + title: "Include private channels" + description: + "Whether to read information from private channels that the\ + \ bot is already in. 
If false, only public channels will be read. If\ + \ true, the bot must be manually added to private channels. " + channel_filter: + type: "array" + default: [] + items: + type: "string" + minLength: 0 + title: "Channel name filter" + description: + "A channel name list (without leading '#' char) which limit\ + \ the channels from which you'd like to sync. Empty list means no filter." + examples: + - "channel_one" + - "channel_two" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate into Slack" + type: "object" + oneOf: + - type: "object" + title: "Sign in via Slack (OAuth)" + required: + - "option_title" + - "client_id" + - "client_secret" + - "access_token" + properties: + option_title: + type: "string" + const: "Default OAuth2.0 authorization" + enum: + - "Default OAuth2.0 authorization" + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." + airbyte_secret: true + access_token: + type: "string" + title: "Access token" + description: + "Slack access_token. See our docs if you need help generating the token." + airbyte_secret: true + order: 0 + - type: "object" + title: "API Token" + required: + - "option_title" + - "api_token" + properties: + option_title: + type: "string" + const: "API Token Credentials" + enum: + - "API Token Credentials" + api_token: + type: "string" + title: "API Token" + description: + "A Slack bot token. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + order: 1 + source-file: + title: "File Source Spec" + type: "object" + required: + - "dataset_name" + - "format" + - "url" + - "provider" + - "sourceType" + properties: + dataset_name: + type: "string" + title: "Dataset Name" + description: + "The Name of the final table to replicate this file into (should\ + \ include letters, numbers dash and underscores only)." + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "excel_binary" + - "fwf" + - "feather" + - "parquet" + - "yaml" + default: "csv" + title: "File Format" + description: + "The Format of the file which should be replicated (Warning:\ + \ some formats may be experimental, please refer to the docs)." + reader_options: + type: "string" + title: "Reader Options" + description: + "This should be a string in JSON format. It depends on the\ + \ chosen file format to provide additional options and tune its behavior." + examples: + - "{}" + - '{"sep": " "}' + - "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"\ + ] }" + url: + type: "string" + title: "URL" + description: "The URL path to access the file which should be replicated." + examples: + - "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" + - "gs://my-google-bucket/data.csv" + - "s3://gdelt-open-data/events/20190914.export.csv" + provider: + type: "object" + title: "Storage Provider" + description: + "The storage Provider or Location of the file(s) which should\ + \ be replicated." 
+ default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + const: "HTTPS" + enum: + - "HTTPS" + user_agent: + type: "boolean" + title: "User-Agent" + default: false + description: "Add User-Agent to request" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "GCS" + enum: + - "GCS" + service_account_json: + type: "string" + title: "Service Account JSON" + airbyte_secret: true + description: + "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + x-speakeasy-param-sensitive: true + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "S3" + enum: + - "S3" + aws_access_key_id: + type: "string" + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + title: "Storage" + const: "AzBlob" + enum: + - "AzBlob" + storage_account: + type: "string" + title: "Storage Account" + description: + "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + title: "SAS Token" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + x-speakeasy-param-sensitive: true + shared_key: + type: "string" + title: "Shared Key" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SSH" + enum: + - "SSH" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SCP" + enum: + - "SCP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SFTP" + enum: + - "SFTP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + description: + "WARNING: Note that the local storage URL available for\ + \ reading must start with the local mount \"/local/\" at the moment\ + \ until we implement more advanced docker mounting options." 
+ const: "local" + enum: + - "local" + sourceType: + title: "file" + const: "file" + enum: + - "file" + order: 0 + type: "string" + source-file-update: + title: "File Source Spec" + type: "object" + required: + - "dataset_name" + - "format" + - "url" + - "provider" + properties: + dataset_name: + type: "string" + title: "Dataset Name" + description: + "The Name of the final table to replicate this file into (should\ + \ include letters, numbers dash and underscores only)." + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "excel_binary" + - "fwf" + - "feather" + - "parquet" + - "yaml" + default: "csv" + title: "File Format" + description: + "The Format of the file which should be replicated (Warning:\ + \ some formats may be experimental, please refer to the docs)." + reader_options: + type: "string" + title: "Reader Options" + description: + "This should be a string in JSON format. It depends on the\ + \ chosen file format to provide additional options and tune its behavior." + examples: + - "{}" + - '{"sep": " "}' + - "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"\ + ] }" + url: + type: "string" + title: "URL" + description: "The URL path to access the file which should be replicated." + examples: + - "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" + - "gs://my-google-bucket/data.csv" + - "s3://gdelt-open-data/events/20190914.export.csv" + provider: + type: "object" + title: "Storage Provider" + description: + "The storage Provider or Location of the file(s) which should\ + \ be replicated." 
+ default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + const: "HTTPS" + enum: + - "HTTPS" + user_agent: + type: "boolean" + title: "User-Agent" + default: false + description: "Add User-Agent to request" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "GCS" + enum: + - "GCS" + service_account_json: + type: "string" + title: "Service Account JSON" + airbyte_secret: true + description: + "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "S3" + enum: + - "S3" + aws_access_key_id: + type: "string" + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." 
+ airbyte_secret: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + title: "Storage" + const: "AzBlob" + enum: + - "AzBlob" + storage_account: + type: "string" + title: "Storage Account" + description: + "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + title: "SAS Token" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + shared_key: + type: "string" + title: "Shared Key" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SSH" + enum: + - "SSH" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SCP" + enum: + - "SCP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SFTP" + enum: + - "SFTP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + description: + "WARNING: Note that the local storage URL available for\ + \ reading must start with the local mount \"/local/\" at the moment\ + \ until we implement more advanced docker mounting options." + const: "local" + enum: + - "local" + source-lokalise: + type: "object" + required: + - "api_key" + - "project_id" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Lokalise API Key with read-access. 
Available at Profile settings\ + \ > API tokens. See here." + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + project_id: + type: "string" + description: "Lokalise project ID. Available at Project Settings > General." + title: "Project Id" + order: 1 + sourceType: + title: "lokalise" + const: "lokalise" + enum: + - "lokalise" + order: 0 + type: "string" + source-lokalise-update: + type: "object" + required: + - "api_key" + - "project_id" + properties: + api_key: + type: "string" + description: + "Lokalise API Key with read-access. Available at Profile settings\ + \ > API tokens. See here." + title: "API Key" + airbyte_secret: true + order: 0 + project_id: + type: "string" + description: "Lokalise project ID. Available at Project Settings > General." + title: "Project Id" + order: 1 + source-zoho-crm: + title: "Zoho Crm Configuration" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "environment" + - "dc_region" + - "edition" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "OAuth2.0 Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "OAuth2.0 Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "OAuth2.0 Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + dc_region: + title: "Data Center Location" + type: "string" + description: + "Please choose the region of your Data Center location. 
More\ + \ info by this Link" + enum: + - "US" + - "AU" + - "EU" + - "IN" + - "CN" + - "JP" + environment: + title: "Environment" + type: "string" + description: "Please choose the environment" + enum: + - "Production" + - "Developer" + - "Sandbox" + start_datetime: + title: "Start Date" + type: + - "string" + - "null" + examples: + - "2000-01-01" + - "2000-01-01 13:00" + - "2000-01-01 13:00:00" + - "2000-01-01T13:00+00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`" + format: "date-time" + edition: + title: "Zoho CRM Edition" + type: "string" + description: + "Choose your Edition of Zoho CRM to determine API Concurrency\ + \ Limits" + enum: + - "Free" + - "Standard" + - "Professional" + - "Enterprise" + - "Ultimate" + default: "Free" + sourceType: + title: "zoho-crm" + const: "zoho-crm" + enum: + - "zoho-crm" + order: 0 + type: "string" + source-zoho-crm-update: + title: "Zoho Crm Configuration" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "environment" + - "dc_region" + - "edition" + properties: + client_id: + type: "string" + title: "Client ID" + description: "OAuth2.0 Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "OAuth2.0 Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "OAuth2.0 Refresh Token" + airbyte_secret: true + dc_region: + title: "Data Center Location" + type: "string" + description: + "Please choose the region of your Data Center location. 
More\ + \ info by this Link" + enum: + - "US" + - "AU" + - "EU" + - "IN" + - "CN" + - "JP" + environment: + title: "Environment" + type: "string" + description: "Please choose the environment" + enum: + - "Production" + - "Developer" + - "Sandbox" + start_datetime: + title: "Start Date" + type: + - "string" + - "null" + examples: + - "2000-01-01" + - "2000-01-01 13:00" + - "2000-01-01 13:00:00" + - "2000-01-01T13:00+00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`" + format: "date-time" + edition: + title: "Zoho CRM Edition" + type: "string" + description: + "Choose your Edition of Zoho CRM to determine API Concurrency\ + \ Limits" + enum: + - "Free" + - "Standard" + - "Professional" + - "Enterprise" + - "Ultimate" + default: "Free" + source-gainsight-px: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "The Aptrinsic API Key which is recieved from the dashboard\ + \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "gainsight-px" + const: "gainsight-px" + enum: + - "gainsight-px" + order: 0 + type: "string" + source-gainsight-px-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "The Aptrinsic API Key which is recieved from the dashboard\ + \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" + order: 0 + source-stripe: + title: "Stripe Source Spec" + type: "object" + required: + - "client_secret" + - "account_id" + - "sourceType" + properties: + account_id: + type: "string" + title: "Account ID" + description: + "Your Stripe account ID (starts with 'acct_', find yours here)." 
+ order: 0 + client_secret: + type: "string" + title: "Secret Key" + description: + "Stripe API key (usually starts with 'sk_live_'; find yours\ + \ here)." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ + \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window in days" + default: 0 + minimum: 0 + description: + "When set, the connector will always re-export data from the\ + \ past N days, where N is the value set here. This is useful if your data\ + \ is frequently updated after creation. The Lookback Window only applies\ + \ to streams that do not support event-based incremental syncs: Events,\ + \ SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks,\ + \ Refunds. More info here" + order: 3 + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 365 + minimum: 1 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + description: + "The time increment used by the connector when requesting data\ + \ from the Stripe API. The bigger the value is, the less requests will\ + \ be made and faster the sync will be. On the other hand, the more seldom\ + \ the state is persisted." + order: 4 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 20 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker thread to use for the sync. The performance\ + \ upper boundary depends on call_rate_limit setting and type of account." 
+ order: 5 + call_rate_limit: + type: "integer" + title: "Max number of API calls per second" + examples: + - 25 + - 100 + description: + "The number of API calls per second that you allow connector\ + \ to make. This value can not be bigger than real API call rate limit\ + \ (https://stripe.com/docs/rate-limits). If not specified the default\ + \ maximum is 25 and 100 calls per second for test and production tokens\ + \ respectively." + sourceType: + title: "stripe" + const: "stripe" + enum: + - "stripe" + order: 0 + type: "string" + source-stripe-update: + title: "Stripe Source Spec" + type: "object" + required: + - "client_secret" + - "account_id" + properties: + account_id: + type: "string" + title: "Account ID" + description: + "Your Stripe account ID (starts with 'acct_', find yours here)." + order: 0 + client_secret: + type: "string" + title: "Secret Key" + description: + "Stripe API key (usually starts with 'sk_live_'; find yours\ + \ here)." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ + \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window in days" + default: 0 + minimum: 0 + description: + "When set, the connector will always re-export data from the\ + \ past N days, where N is the value set here. This is useful if your data\ + \ is frequently updated after creation. The Lookback Window only applies\ + \ to streams that do not support event-based incremental syncs: Events,\ + \ SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks,\ + \ Refunds. 
More info here" + order: 3 + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 365 + minimum: 1 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + description: + "The time increment used by the connector when requesting data\ + \ from the Stripe API. The bigger the value is, the less requests will\ + \ be made and faster the sync will be. On the other hand, the more seldom\ + \ the state is persisted." + order: 4 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 20 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker thread to use for the sync. The performance\ + \ upper boundary depends on call_rate_limit setting and type of account." + order: 5 + call_rate_limit: + type: "integer" + title: "Max number of API calls per second" + examples: + - 25 + - 100 + description: + "The number of API calls per second that you allow connector\ + \ to make. This value can not be bigger than real API call rate limit\ + \ (https://stripe.com/docs/rate-limits). If not specified the default\ + \ maximum is 25 and 100 calls per second for test and production tokens\ + \ respectively." 
+ source-buzzsprout: + type: "object" + required: + - "api_key" + - "podcast_id" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + podcast_id: + type: "string" + description: "Podcast ID found in `https://www.buzzsprout.com/my/profile/api`" + title: "Podcast ID" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "buzzsprout" + const: "buzzsprout" + enum: + - "buzzsprout" + order: 0 + type: "string" + source-buzzsprout-update: + type: "object" + required: + - "api_key" + - "podcast_id" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + podcast_id: + type: "string" + description: "Podcast ID found in `https://www.buzzsprout.com/my/profile/api`" + title: "Podcast ID" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-youtube-analytics: + title: "YouTube Analytics Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: 
true + x-speakeasy-param-sensitive: true + sourceType: + title: "youtube-analytics" + const: "youtube-analytics" + enum: + - "youtube-analytics" + order: 0 + type: "string" + source-youtube-analytics-update: + title: "YouTube Analytics Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: true + source-google-sheets: + title: "Google Sheets Source Spec" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + - "sourceType" + properties: + batch_size: + type: "integer" + title: "Row Batch Size" + description: + "Default value is 200. An integer representing row batch size\ + \ for each sent request to Google Sheets API. Row batch size means how\ + \ many rows are processed from the google sheet, for example default value\ + \ 200 would process rows 1-201, then 201-401 and so on. Based on Google\ + \ Sheets API limits documentation, it is possible to send up to 300\ + \ requests per minute, but each individual request has to be processed\ + \ under 180 seconds, otherwise the request returns a timeout error. In\ + \ regards to this information, consider network speed and number of columns\ + \ of the google sheet when deciding a batch_size value. Default value\ + \ should cover most of the cases, but if a google sheet has over 100,000\ + \ records or more, consider increasing batch_size value." 
+ default: 200 + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "Enter the link to the Google spreadsheet you want to sync.\ + \ To copy the link, click the 'Share' button in the top-right corner of\ + \ the spreadsheet, then click 'Copy link'." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" + names_conversion: + type: "boolean" + title: "Convert Column Names to SQL-Compliant Format" + description: + "Enables the conversion of column names to a standardized,\ + \ SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this\ + \ option if your destination is SQL-based." + default: false + credentials: + type: "object" + title: "Authentication" + description: "Credentials for connecting to the Google Sheets API" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Enter your Google application's refresh token. See Google's\ + \ documentation for more information." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + service_account_info: + type: "string" + title: "Service Account Information." + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + x-speakeasy-param-sensitive: true + sourceType: + title: "google-sheets" + const: "google-sheets" + enum: + - "google-sheets" + order: 0 + type: "string" + source-google-sheets-update: + title: "Google Sheets Source Spec" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + properties: + batch_size: + type: "integer" + title: "Row Batch Size" + description: + "Default value is 200. An integer representing row batch size\ + \ for each sent request to Google Sheets API. Row batch size means how\ + \ many rows are processed from the google sheet, for example default value\ + \ 200 would process rows 1-201, then 201-401 and so on. Based on Google\ + \ Sheets API limits documentation, it is possible to send up to 300\ + \ requests per minute, but each individual request has to be processed\ + \ under 180 seconds, otherwise the request returns a timeout error. In\ + \ regards to this information, consider network speed and number of columns\ + \ of the google sheet when deciding a batch_size value. Default value\ + \ should cover most of the cases, but if a google sheet has over 100,000\ + \ records or more, consider increasing batch_size value." 
+ default: 200 + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "Enter the link to the Google spreadsheet you want to sync.\ + \ To copy the link, click the 'Share' button in the top-right corner of\ + \ the spreadsheet, then click 'Copy link'." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" + names_conversion: + type: "boolean" + title: "Convert Column Names to SQL-Compliant Format" + description: + "Enables the conversion of column names to a standardized,\ + \ SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this\ + \ option if your destination is SQL-based." + default: false + credentials: + type: "object" + title: "Authentication" + description: "Credentials for connecting to the Google Sheets API" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Enter your Google application's refresh token. See Google's\ + \ documentation for more information." + airbyte_secret: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + service_account_info: + type: "string" + title: "Service Account Information." 
+ description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + source-zendesk-talk: + type: "object" + title: "Source Zendesk Talk Spec" + required: + - "start_date" + - "subdomain" + - "sourceType" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: + "This is your Zendesk subdomain that can be found in your account\ + \ URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN\ + \ is the value of your subdomain." + credentials: + title: "Authentication" + type: "object" + order: 1 + description: + "Zendesk service provides two authentication methods. Choose\ + \ between: `OAuth2.0` or `API token`." + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the API token generated. See the docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "api_token" + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. 
See the docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated." + examples: + - "2020-10-15T00:00:00Z" + sourceType: + title: "zendesk-talk" + const: "zendesk-talk" + enum: + - "zendesk-talk" + order: 0 + type: "string" + source-zendesk-talk-update: + type: "object" + title: "Source Zendesk Talk Spec" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: + "This is your Zendesk subdomain that can be found in your account\ + \ URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN\ + \ is the value of your subdomain." + credentials: + title: "Authentication" + type: "object" + order: 1 + description: + "Zendesk service provides two authentication methods. Choose\ + \ between: `OAuth2.0` or `API token`." + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the API token generated. See the docs\ + \ for more information." 
+ airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "api_token" + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See the docs\ + \ for more information." + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated." + examples: + - "2020-10-15T00:00:00Z" + source-freshdesk: + type: "object" + required: + - "api_key" + - "domain" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshdesk API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain: + type: "string" + order: 2 + title: "Domain" + description: "Freshdesk domain" + examples: + - "myaccount.freshdesk.com" + pattern: "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" + requests_per_minute: + type: "integer" + order: 3 + title: "Requests per minute" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + type: "string" + order: 4 + description: + "UTC date and time. 
Any data created after this date will be\ + \ replicated. If this parameter is not set, all data will be replicated." + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-12-01T00:00:00Z" + lookback_window_in_days: + type: "integer" + order: 5 + title: "Lookback Window" + default: 14 + description: + "Number of days for lookback window for the stream Satisfaction\ + \ Ratings" + sourceType: + title: "freshdesk" + const: "freshdesk" + enum: + - "freshdesk" + order: 0 + type: "string" + source-freshdesk-update: + type: "object" + required: + - "api_key" + - "domain" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshdesk API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + domain: + type: "string" + order: 2 + title: "Domain" + description: "Freshdesk domain" + examples: + - "myaccount.freshdesk.com" + pattern: "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" + requests_per_minute: + type: "integer" + order: 3 + title: "Requests per minute" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + type: "string" + order: 4 + description: + "UTC date and time. Any data created after this date will be\ + \ replicated. If this parameter is not set, all data will be replicated." 
+ format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-12-01T00:00:00Z" + lookback_window_in_days: + type: "integer" + order: 5 + title: "Lookback Window" + default: 14 + description: + "Number of days for lookback window for the stream Satisfaction\ + \ Ratings" + source-asana: + title: "Asana Spec" + type: "object" + properties: + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Github" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Asana (Oauth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "OAuth Credentials" + const: "OAuth Credentials" + enum: + - "OAuth Credentials" + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "PAT Credentials" + const: "PAT Credentials" + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Token" + description: + "Asana Personal Access Token (generate yours here)." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_export_ids: + title: "Organization Export IDs" + description: "Globally unique identifiers for the organization exports" + type: "array" + sourceType: + title: "asana" + const: "asana" + enum: + - "asana" + order: 0 + type: "string" + source-asana-update: + title: "Asana Spec" + type: "object" + properties: + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Github" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Asana (Oauth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "OAuth Credentials" + const: "OAuth Credentials" + enum: + - "OAuth Credentials" + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + refresh_token: + type: "string" + title: "" + description: "" + airbyte_secret: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "PAT Credentials" + const: "PAT Credentials" + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Token" + description: + "Asana Personal Access Token (generate yours here)." + airbyte_secret: true + organization_export_ids: + title: "Organization Export IDs" + description: "Globally unique identifiers for the organization exports" + type: "array" + source-posthog: + title: "PostHog Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + start_date: + title: "Start Date" + type: "string" + description: + "The date from which you'd like to replicate the data. Any\ + \ data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + api_key: + type: "string" + airbyte_secret: true + title: "API Key" + description: + "API Key. See the docs for information on how to generate this key." + x-speakeasy-param-sensitive: true + base_url: + type: "string" + default: "https://app.posthog.com" + title: "Base URL" + description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." + examples: + - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of events\ + \ stream." + examples: + - 30 + - 10 + - 5 + sourceType: + title: "posthog" + const: "posthog" + enum: + - "posthog" + order: 0 + type: "string" + source-posthog-update: + title: "PostHog Spec" + type: "object" + required: + - "api_key" + - "start_date" + properties: + start_date: + title: "Start Date" + type: "string" + description: + "The date from which you'd like to replicate the data. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + api_key: + type: "string" + airbyte_secret: true + title: "API Key" + description: + "API Key. See the docs for information on how to generate this key." + base_url: + type: "string" + default: "https://app.posthog.com" + title: "Base URL" + description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." + examples: + - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of events\ + \ stream." 
+ examples: + - 30 + - 10 + - 5 + source-split-io: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "split-io" + const: "split-io" + enum: + - "split-io" + order: 0 + type: "string" + source-split-io-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-getlago: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_url: + type: "string" + description: "Your Lago API URL" + title: "API Url" + default: "https://api.getlago.com/api/v1" + order: 0 + api_key: + type: "string" + description: + "Your API Key. See here." + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "getlago" + const: "getlago" + enum: + - "getlago" + order: 0 + type: "string" + source-getlago-update: + type: "object" + required: + - "api_key" + properties: + api_url: + type: "string" + description: "Your Lago API URL" + title: "API Url" + default: "https://api.getlago.com/api/v1" + order: 0 + api_key: + type: "string" + description: + "Your API Key. See here." 
+ title: "API Key" + airbyte_secret: true + order: 1 + source-gridly: + title: "Gridly Spec" + type: "object" + required: + - "api_key" + - "grid_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + grid_id: + type: "string" + title: "Grid ID" + description: "ID of a grid, or can be ID of a branch" + sourceType: + title: "gridly" + const: "gridly" + enum: + - "gridly" + order: 0 + type: "string" + source-gridly-update: + title: "Gridly Spec" + type: "object" + required: + - "api_key" + - "grid_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + grid_id: + type: "string" + title: "Grid ID" + description: "ID of a grid, or can be ID of a branch" + source-microsoft-teams: + title: "Microsoft Teams Spec" + type: "object" + required: + - "period" + - "sourceType" + properties: + period: + type: "string" + title: "Period" + description: + "Specifies the length of time over which the Team Device Report\ + \ stream is aggregated. The supported values are: D7, D30, D90, and D180." + examples: + - "D7" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Microsoft" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Microsoft (OAuth 2.0)" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. 
Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A Refresh Token to renew the expired Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate via Microsoft" + required: + - "tenant_id" + - "client_id" + - "client_secret" + properties: + auth_type: + type: "string" + const: "Token" + enum: + - "Token" + default: "Token" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "microsoft-teams" + const: "microsoft-teams" + enum: + - "microsoft-teams" + order: 0 + type: "string" + source-microsoft-teams-update: + title: "Microsoft Teams Spec" + type: "object" + required: + - "period" + properties: + period: + type: "string" + title: "Period" + description: + "Specifies the length of time over which the Team Device Report\ + \ stream is aggregated. The supported values are: D7, D30, D90, and D180." + examples: + - "D7" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Microsoft" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Microsoft (OAuth 2.0)" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A Refresh Token to renew the expired Access Token." 
+ airbyte_secret: true + - type: "object" + title: "Authenticate via Microsoft" + required: + - "tenant_id" + - "client_id" + - "client_secret" + properties: + auth_type: + type: "string" + const: "Token" + enum: + - "Token" + default: "Token" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." + airbyte_secret: true + source-looker: + type: "object" + required: + - "client_id" + - "client_secret" + - "domain" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + description: + "The Client ID is first part of an API3 key that is specific\ + \ to each Looker user. See the docs for more information on how to generate this key." + client_secret: + type: "string" + order: 1 + title: "Client Secret" + description: "The Client Secret is second part of an API3 key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain: + type: "string" + order: 2 + title: "Domain" + examples: + - "domainname.looker.com" + - "looker.clientname.com" + - "123.123.124.123:8000" + description: + "Domain for your Looker account, e.g. 
airbyte.cloud.looker.com,looker.[clientname].com,IP\ + \ address" + run_look_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + order: 3 + title: "Look IDs to Run" + description: "The IDs of any Looks to run" + sourceType: + title: "looker" + const: "looker" + enum: + - "looker" + order: 0 + type: "string" + source-looker-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "domain" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + description: + "The Client ID is first part of an API3 key that is specific\ + \ to each Looker user. See the docs for more information on how to generate this key." + client_secret: + type: "string" + order: 1 + title: "Client Secret" + description: "The Client Secret is second part of an API3 key." + airbyte_secret: true + domain: + type: "string" + order: 2 + title: "Domain" + examples: + - "domainname.looker.com" + - "looker.clientname.com" + - "123.123.124.123:8000" + description: + "Domain for your Looker account, e.g. airbyte.cloud.looker.com,looker.[clientname].com,IP\ + \ address" + run_look_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + order: 3 + title: "Look IDs to Run" + description: "The IDs of any Looks to run" + source-dropbox-sign: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. 
Find it at https://app.hellosign.com/home/myAccount#api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "dropbox-sign" + const: "dropbox-sign" + enum: + - "dropbox-sign" + order: 0 + type: "string" + source-dropbox-sign-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://app.hellosign.com/home/myAccount#api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-google-tasks: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + records_limit: + type: "string" + description: "The maximum number of records to be returned per request" + order: 0 + title: "Records Limit" + default: "50" + sourceType: + title: "google-tasks" + const: "google-tasks" + enum: + - "google-tasks" + order: 0 + type: "string" + source-google-tasks-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + records_limit: + type: "string" + description: "The maximum number of records to be returned per request" + 
order: 0 + title: "Records Limit" + default: "50" + source-amazon-seller-partner: + title: "Amazon Seller Partner Spec" + type: "object" + required: + - "aws_environment" + - "region" + - "account_type" + - "lwa_app_id" + - "lwa_client_secret" + - "refresh_token" + - "sourceType" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + aws_environment: + title: "AWS Environment" + description: "Select the AWS Environment." + enum: + - "PRODUCTION" + - "SANDBOX" + default: "PRODUCTION" + type: "string" + order: 1 + region: + title: "AWS Region" + description: "Select the AWS Region." + enum: + - "AE" + - "AU" + - "BE" + - "BR" + - "CA" + - "DE" + - "EG" + - "ES" + - "FR" + - "GB" + - "IN" + - "IT" + - "JP" + - "MX" + - "NL" + - "PL" + - "SA" + - "SE" + - "SG" + - "TR" + - "UK" + - "US" + default: "US" + type: "string" + order: 2 + account_type: + title: "AWS Seller Partner Account Type" + description: + "Type of the Account you're going to authorize the Airbyte\ + \ application by" + enum: + - "Seller" + - "Vendor" + default: "Seller" + type: "string" + order: 3 + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "The Refresh Token obtained via OAuth flow authorization." + airbyte_secret: true + order: 6 + type: "string" + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
If start date is not provided\ + \ or older than 2 years ago from today, the date 2 years ago from today\ + \ will be used." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + order: 7 + type: "string" + format: "date-time" + replication_end_date: + title: "End Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data after this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$" + examples: + - "2017-01-25T00:00:00Z" + order: 8 + type: "string" + format: "date-time" + period_in_days: + title: "Period In Days" + type: "integer" + description: + "For syncs spanning a large date range, this option is used\ + \ to request data in a smaller fixed window to improve sync reliability.\ + \ This time window can be configured granularly by day." + default: 90 + minimum: 1 + order: 9 + report_options_list: + title: "Report Options" + description: + "Additional information passed to reports. This varies by report\ + \ type." 
+ order: 10 + type: "array" + items: + type: "object" + title: "Report Options" + required: + - "report_name" + - "stream_name" + - "options_list" + properties: + report_name: + title: "Report Name" + type: "string" + order: 0 + enum: + - "GET_AFN_INVENTORY_DATA" + - "GET_AFN_INVENTORY_DATA_BY_COUNTRY" + - "GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL" + - "GET_FBA_ESTIMATED_FBA_FEES_TXT_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_PROMOTION_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_REPLACEMENT_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_ORDER_DETAIL_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_SHIPMENT_DETAIL_DATA" + - "GET_FBA_INVENTORY_PLANNING_DATA" + - "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA" + - "GET_FBA_REIMBURSEMENTS_DATA" + - "GET_FBA_SNS_FORECAST_DATA" + - "GET_FBA_SNS_PERFORMANCE_DATA" + - "GET_FBA_STORAGE_FEE_CHARGES_DATA" + - "GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_FLAT_FILE_ARCHIVED_ORDERS_DATA_BY_ORDER_DATE" + - "GET_FLAT_FILE_OPEN_LISTINGS_DATA" + - "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE" + - "GET_LEDGER_DETAIL_VIEW_DATA" + - "GET_LEDGER_SUMMARY_VIEW_DATA" + - "GET_MERCHANT_CANCELLED_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_ALL_DATA" + - "GET_MERCHANT_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT" + - "GET_MERCHANT_LISTINGS_INACTIVE_DATA" + - "GET_MERCHANTS_LISTINGS_FYP_REPORT" + - "GET_ORDER_REPORT_DATA_SHIPPING" + - "GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT" + - "GET_SELLER_FEEDBACK_DATA" + - "GET_STRANDED_INVENTORY_UI_DATA" + - "GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE" + - "GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_XML_BROWSE_TREE_DATA" + - "GET_VENDOR_REAL_TIME_INVENTORY_REPORT" + - "GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT" + - "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + - "GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT" + - 
"GET_SALES_AND_TRAFFIC_REPORT" + - "GET_VENDOR_SALES_REPORT" + - "GET_VENDOR_INVENTORY_REPORT" + - "GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT" + - "GET_VENDOR_TRAFFIC_REPORT" + stream_name: + title: "Stream Name" + type: "string" + order: 1 + options_list: + title: "List of options" + description: "List of options" + type: "array" + order: 2 + items: + type: "object" + required: + - "option_name" + - "option_value" + properties: + option_name: + title: "Name" + type: "string" + order: 0 + option_value: + title: "Value" + type: "string" + order: 1 + wait_to_avoid_fatal_errors: + title: "Wait between requests to avoid fatal statuses in reports" + type: "boolean" + description: + "For report based streams with known amount of requests per\ + \ time period, this option will use waiting time between requests to avoid\ + \ fatal statuses in reports. See Troubleshooting section for more details" + default: false + order: 11 + sourceType: + title: "amazon-seller-partner" + const: "amazon-seller-partner" + enum: + - "amazon-seller-partner" + order: 0 + type: "string" + source-amazon-seller-partner-update: + title: "Amazon Seller Partner Spec" + type: "object" + required: + - "aws_environment" + - "region" + - "account_type" + - "lwa_app_id" + - "lwa_client_secret" + - "refresh_token" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + aws_environment: + title: "AWS Environment" + description: "Select the AWS Environment." + enum: + - "PRODUCTION" + - "SANDBOX" + default: "PRODUCTION" + type: "string" + order: 1 + region: + title: "AWS Region" + description: "Select the AWS Region." 
+ enum: + - "AE" + - "AU" + - "BE" + - "BR" + - "CA" + - "DE" + - "EG" + - "ES" + - "FR" + - "GB" + - "IN" + - "IT" + - "JP" + - "MX" + - "NL" + - "PL" + - "SA" + - "SE" + - "SG" + - "TR" + - "UK" + - "US" + default: "US" + type: "string" + order: 2 + account_type: + title: "AWS Seller Partner Account Type" + description: + "Type of the Account you're going to authorize the Airbyte\ + \ application by" + enum: + - "Seller" + - "Vendor" + default: "Seller" + type: "string" + order: 3 + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + refresh_token: + title: "Refresh Token" + description: "The Refresh Token obtained via OAuth flow authorization." + airbyte_secret: true + order: 6 + type: "string" + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If start date is not provided\ + \ or older than 2 years ago from today, the date 2 years ago from today\ + \ will be used." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + order: 7 + type: "string" + format: "date-time" + replication_end_date: + title: "End Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data after this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$" + examples: + - "2017-01-25T00:00:00Z" + order: 8 + type: "string" + format: "date-time" + period_in_days: + title: "Period In Days" + type: "integer" + description: + "For syncs spanning a large date range, this option is used\ + \ to request data in a smaller fixed window to improve sync reliability.\ + \ This time window can be configured granularly by day." + default: 90 + minimum: 1 + order: 9 + report_options_list: + title: "Report Options" + description: + "Additional information passed to reports. This varies by report\ + \ type." + order: 10 + type: "array" + items: + type: "object" + title: "Report Options" + required: + - "report_name" + - "stream_name" + - "options_list" + properties: + report_name: + title: "Report Name" + type: "string" + order: 0 + enum: + - "GET_AFN_INVENTORY_DATA" + - "GET_AFN_INVENTORY_DATA_BY_COUNTRY" + - "GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL" + - "GET_FBA_ESTIMATED_FBA_FEES_TXT_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_PROMOTION_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_REPLACEMENT_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_ORDER_DETAIL_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_SHIPMENT_DETAIL_DATA" + - "GET_FBA_INVENTORY_PLANNING_DATA" + - "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA" + - "GET_FBA_REIMBURSEMENTS_DATA" + - "GET_FBA_SNS_FORECAST_DATA" + - "GET_FBA_SNS_PERFORMANCE_DATA" + - "GET_FBA_STORAGE_FEE_CHARGES_DATA" + - "GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_FLAT_FILE_ARCHIVED_ORDERS_DATA_BY_ORDER_DATE" + - "GET_FLAT_FILE_OPEN_LISTINGS_DATA" + - "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE" + - "GET_LEDGER_DETAIL_VIEW_DATA" + - "GET_LEDGER_SUMMARY_VIEW_DATA" + - "GET_MERCHANT_CANCELLED_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_ALL_DATA" + - 
"GET_MERCHANT_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT" + - "GET_MERCHANT_LISTINGS_INACTIVE_DATA" + - "GET_MERCHANTS_LISTINGS_FYP_REPORT" + - "GET_ORDER_REPORT_DATA_SHIPPING" + - "GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT" + - "GET_SELLER_FEEDBACK_DATA" + - "GET_STRANDED_INVENTORY_UI_DATA" + - "GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE" + - "GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_XML_BROWSE_TREE_DATA" + - "GET_VENDOR_REAL_TIME_INVENTORY_REPORT" + - "GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT" + - "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + - "GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT" + - "GET_SALES_AND_TRAFFIC_REPORT" + - "GET_VENDOR_SALES_REPORT" + - "GET_VENDOR_INVENTORY_REPORT" + - "GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT" + - "GET_VENDOR_TRAFFIC_REPORT" + stream_name: + title: "Stream Name" + type: "string" + order: 1 + options_list: + title: "List of options" + description: "List of options" + type: "array" + order: 2 + items: + type: "object" + required: + - "option_name" + - "option_value" + properties: + option_name: + title: "Name" + type: "string" + order: 0 + option_value: + title: "Value" + type: "string" + order: 1 + wait_to_avoid_fatal_errors: + title: "Wait between requests to avoid fatal statuses in reports" + type: "boolean" + description: + "For report based streams with known amount of requests per\ + \ time period, this option will use waiting time between requests to avoid\ + \ fatal statuses in reports. 
See Troubleshooting section for more details" + default: false + order: 11 + source-northpass-lms: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "northpass-lms" + const: "northpass-lms" + enum: + - "northpass-lms" + order: 0 + type: "string" + source-northpass-lms-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + source-recreation: + type: "object" + required: + - "apikey" + - "sourceType" + properties: + apikey: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + query_campsites: + type: "string" + title: "Query Campsite" + order: 1 + sourceType: + title: "recreation" + const: "recreation" + enum: + - "recreation" + order: 0 + type: "string" + source-recreation-update: + type: "object" + required: + - "apikey" + properties: + apikey: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + query_campsites: + type: "string" + title: "Query Campsite" + order: 1 + source-breezy-hr: + type: "object" + required: + - "api_key" + - "company_id" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + company_id: + type: "string" + order: 1 + title: "Company ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "breezy-hr" + const: "breezy-hr" + enum: + - "breezy-hr" + order: 0 + type: "string" + source-breezy-hr-update: + type: "object" + required: + - "api_key" + - "company_id" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + company_id: + type: "string" + order: 1 + title: "Company ID" + airbyte_secret: true + 
source-linkedin-ads: + title: "Linkedin Ads Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Access Token" + type: "object" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + examples: + - "2021-05-17" + format: "date" + lookback_window: + type: "integer" + title: "Lookback Window" + default: 0 + minimum: 0 + description: "How far into the past to look for records. 
(in days)" + account_ids: + title: "Account IDs" + type: "array" + description: + "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." + items: + type: "integer" + examples: + - 123456789 + default: [] + ad_analytics_reports: + title: "Custom Ad Analytics Reports" + type: "array" + items: + type: "object" + title: "Ad Analytics Report Configuration" + description: "Config for custom ad Analytics Report" + required: + - "name" + - "pivot_by" + - "time_granularity" + properties: + name: + title: "Report Name" + description: "The name for the custom report." + type: "string" + pivot_by: + title: "Pivot Category" + description: + "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." + type: "string" + enum: + - "COMPANY" + - "ACCOUNT" + - "SHARE" + - "CAMPAIGN" + - "CREATIVE" + - "CAMPAIGN_GROUP" + - "CONVERSION" + - "CONVERSATION_NODE" + - "CONVERSATION_NODE_OPTION_INDEX" + - "SERVING_LOCATION" + - "CARD_INDEX" + - "MEMBER_COMPANY_SIZE" + - "MEMBER_INDUSTRY" + - "MEMBER_SENIORITY" + - "MEMBER_JOB_TITLE" + - "MEMBER_JOB_FUNCTION" + - "MEMBER_COUNTRY_V2" + - "MEMBER_REGION_V2" + - "MEMBER_COMPANY" + - "PLACEMENT_NAME" + - "IMPRESSION_DEVICE_TYPE" + time_granularity: + title: "Time Granularity" + description: + "Choose how to group the data in your report by time.\ + \ The options are:
    - 'ALL': A single result summarizing the entire\ + \ time range.
    - 'DAILY': Group results by each day.
    - 'MONTHLY':\ + \ Group results by each month.
    - 'YEARLY': Group results by each\ + \ year.
    Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." + type: "string" + enum: + - "ALL" + - "DAILY" + - "MONTHLY" + - "YEARLY" + default: [] + sourceType: + title: "linkedin-ads" + const: "linkedin-ads" + enum: + - "linkedin-ads" + order: 0 + type: "string" + source-linkedin-ads-update: + title: "Linkedin Ads Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + - title: "Access Token" + type: "object" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." 
+ examples: + - "2021-05-17" + format: "date" + lookback_window: + type: "integer" + title: "Lookback Window" + default: 0 + minimum: 0 + description: "How far into the past to look for records. (in days)" + account_ids: + title: "Account IDs" + type: "array" + description: + "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." + items: + type: "integer" + examples: + - 123456789 + default: [] + ad_analytics_reports: + title: "Custom Ad Analytics Reports" + type: "array" + items: + type: "object" + title: "Ad Analytics Report Configuration" + description: "Config for custom ad Analytics Report" + required: + - "name" + - "pivot_by" + - "time_granularity" + properties: + name: + title: "Report Name" + description: "The name for the custom report." + type: "string" + pivot_by: + title: "Pivot Category" + description: + "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." + type: "string" + enum: + - "COMPANY" + - "ACCOUNT" + - "SHARE" + - "CAMPAIGN" + - "CREATIVE" + - "CAMPAIGN_GROUP" + - "CONVERSION" + - "CONVERSATION_NODE" + - "CONVERSATION_NODE_OPTION_INDEX" + - "SERVING_LOCATION" + - "CARD_INDEX" + - "MEMBER_COMPANY_SIZE" + - "MEMBER_INDUSTRY" + - "MEMBER_SENIORITY" + - "MEMBER_JOB_TITLE" + - "MEMBER_JOB_FUNCTION" + - "MEMBER_COUNTRY_V2" + - "MEMBER_REGION_V2" + - "MEMBER_COMPANY" + - "PLACEMENT_NAME" + - "IMPRESSION_DEVICE_TYPE" + time_granularity: + title: "Time Granularity" + description: + "Choose how to group the data in your report by time.\ + \ The options are:
    - 'ALL': A single result summarizing the entire\ + \ time range.
    - 'DAILY': Group results by each day.
    - 'MONTHLY':\ + \ Group results by each month.
    - 'YEARLY': Group results by each\ + \ year.
    Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." + type: "string" + enum: + - "ALL" + - "DAILY" + - "MONTHLY" + - "YEARLY" + default: [] + source-us-census: + type: "object" + required: + - "query_path" + - "api_key" + - "sourceType" + properties: + query_params: + type: "string" + description: + "The query parameters portion of the GET request, without the\ + \ api key" + order: 0 + pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$" + examples: + - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" + - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path: + type: "string" + description: "The path portion of the GET request" + order: 1 + pattern: "^data(\\/[\\w\\d]+)+$" + examples: + - "data/2019/cbp" + - "data/2018/acs" + - "data/timeseries/healthins/sahie" + api_key: + type: "string" + description: + "Your API Key. Get your key here." + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "us-census" + const: "us-census" + enum: + - "us-census" + order: 0 + type: "string" + source-us-census-update: + type: "object" + required: + - "query_path" + - "api_key" + properties: + query_params: + type: "string" + description: + "The query parameters portion of the GET request, without the\ + \ api key" + order: 0 + pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$" + examples: + - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" + - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path: + type: "string" + description: "The path portion of the GET request" + order: 1 + pattern: "^data(\\/[\\w\\d]+)+$" + examples: + - "data/2019/cbp" + - "data/2018/acs" + - "data/timeseries/healthins/sahie" + api_key: + type: "string" + description: + "Your API Key. 
Get your key here." + order: 2 + airbyte_secret: true + source-goldcast: + title: "goldcast.io Source Spec" + type: "object" + required: + - "access_key" + - "sourceType" + properties: + access_key: + type: "string" + description: + "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "goldcast" + const: "goldcast" + enum: + - "goldcast" + order: 0 + type: "string" + source-goldcast-update: + title: "goldcast.io Source Spec" + type: "object" + required: + - "access_key" + properties: + access_key: + type: "string" + description: + "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + source-pinterest: + title: "Pinterest Spec" + type: "object" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set a date,\ + \ it would be defaulted to latest allowed date by api (89 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + status: + title: "Status" + description: + "For the ads, ad_groups, and campaigns streams, specifying\ + \ a status will filter out records that do not match the specified ones.\ + \ If a status is not specified, the source will default to records with\ + \ a status of either ACTIVE or PAUSED." 
+ type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true + credentials: + title: "OAuth2.0" + type: "object" + required: + - "auth_method" + - "refresh_token" + - "client_id" + - "client_secret" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to obtain new Access Token, when it's expired." + airbyte_secret: true + x-speakeasy-param-sensitive: true + custom_reports: + title: "Custom Reports" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." 
+ type: "array" + items: + title: "ReportConfig" + description: "Config for custom report" + type: "object" + required: + - "name" + - "level" + - "granularity" + - "columns" + properties: + name: + title: "Name" + description: "The name value of report" + type: "string" + order: 0 + level: + title: "Level" + description: "Chosen level for API" + default: "ADVERTISER" + enum: + - "ADVERTISER" + - "ADVERTISER_TARGETING" + - "CAMPAIGN" + - "CAMPAIGN_TARGETING" + - "AD_GROUP" + - "AD_GROUP_TARGETING" + - "PIN_PROMOTION" + - "PIN_PROMOTION_TARGETING" + - "KEYWORD" + - "PRODUCT_GROUP" + - "PRODUCT_GROUP_TARGETING" + - "PRODUCT_ITEM" + type: "string" + order: 1 + granularity: + title: "Granularity" + description: "Chosen granularity for API" + default: "TOTAL" + enum: + - "TOTAL" + - "DAY" + - "HOUR" + - "WEEK" + - "MONTH" + type: "string" + order: 2 + columns: + title: "Columns" + description: "A list of chosen columns" + default: [] + type: "array" + order: 3 + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "ADVERTISER_ID" + - "AD_ACCOUNT_ID" + - "AD_GROUP_ENTITY_STATUS" + - "AD_GROUP_ID" + - "AD_ID" + - "CAMPAIGN_DAILY_SPEND_CAP" + - "CAMPAIGN_ENTITY_STATUS" + - "CAMPAIGN_ID" + - "CAMPAIGN_LIFETIME_SPEND_CAP" + - "CAMPAIGN_NAME" + - "CHECKOUT_ROAS" + - "CLICKTHROUGH_1" + - "CLICKTHROUGH_1_GROSS" + - "CLICKTHROUGH_2" + - "CPC_IN_MICRO_DOLLAR" + - "CPM_IN_DOLLAR" + - "CPM_IN_MICRO_DOLLAR" + - "CTR" + - "CTR_2" + - "ECPCV_IN_DOLLAR" + - "ECPCV_P95_IN_DOLLAR" + - "ECPC_IN_DOLLAR" + - "ECPC_IN_MICRO_DOLLAR" + - "ECPE_IN_DOLLAR" + - "ECPM_IN_MICRO_DOLLAR" + - "ECPV_IN_DOLLAR" + - "ECTR" + - "EENGAGEMENT_RATE" + - "ENGAGEMENT_1" + - "ENGAGEMENT_2" + - "ENGAGEMENT_RATE" + - "IDEA_PIN_PRODUCT_TAG_VISIT_1" + - "IDEA_PIN_PRODUCT_TAG_VISIT_2" + - "IMPRESSION_1" + - "IMPRESSION_1_GROSS" + - "IMPRESSION_2" + - "INAPP_CHECKOUT_COST_PER_ACTION" + - "OUTBOUND_CLICK_1" + - "OUTBOUND_CLICK_2" + - "PAGE_VISIT_COST_PER_ACTION" + - "PAGE_VISIT_ROAS" + - "PAID_IMPRESSION" + - "PIN_ID" + - "PIN_PROMOTION_ID" + - "REPIN_1" + - "REPIN_2" + - "REPIN_RATE" + - "SPEND_IN_DOLLAR" + - "SPEND_IN_MICRO_DOLLAR" + - "TOTAL_CHECKOUT" + - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICKTHROUGH" + - "TOTAL_CLICK_ADD_TO_CART" + - "TOTAL_CLICK_CHECKOUT" + - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICK_LEAD" + - "TOTAL_CLICK_SIGNUP" + - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CONVERSIONS" + - "TOTAL_CUSTOM" + - "TOTAL_ENGAGEMENT" + - "TOTAL_ENGAGEMENT_CHECKOUT" + - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_ENGAGEMENT_LEAD" + - "TOTAL_ENGAGEMENT_SIGNUP" + - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT" + - "TOTAL_IMPRESSION_FREQUENCY" + - "TOTAL_IMPRESSION_USER" + - "TOTAL_LEAD" + - "TOTAL_OFFLINE_CHECKOUT" + - "TOTAL_PAGE_VISIT" + - "TOTAL_REPIN_RATE" + - "TOTAL_SIGNUP" + - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIDEO_3SEC_VIEWS" + - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND" + - 
"TOTAL_VIDEO_MRC_VIEWS" + - "TOTAL_VIDEO_P0_COMBINED" + - "TOTAL_VIDEO_P100_COMPLETE" + - "TOTAL_VIDEO_P25_COMBINED" + - "TOTAL_VIDEO_P50_COMBINED" + - "TOTAL_VIDEO_P75_COMBINED" + - "TOTAL_VIDEO_P95_COMBINED" + - "TOTAL_VIEW_ADD_TO_CART" + - "TOTAL_VIEW_CHECKOUT" + - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIEW_LEAD" + - "TOTAL_VIEW_SIGNUP" + - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CHECKOUT" + - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CLICK_CHECKOUT" + - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_SESSIONS" + - "TOTAL_WEB_VIEW_CHECKOUT" + - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "VIDEO_3SEC_VIEWS_2" + - "VIDEO_LENGTH" + - "VIDEO_MRC_VIEWS_2" + - "VIDEO_P0_COMBINED_2" + - "VIDEO_P100_COMPLETE_2" + - "VIDEO_P25_COMBINED_2" + - "VIDEO_P50_COMBINED_2" + - "VIDEO_P75_COMBINED_2" + - "VIDEO_P95_COMBINED_2" + - "WEB_CHECKOUT_COST_PER_ACTION" + - "WEB_CHECKOUT_ROAS" + - "WEB_SESSIONS_1" + - "WEB_SESSIONS_2" + click_window_days: + title: "Click window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a pin click action." + default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 4 + engagement_window_days: + title: "Engagement window days" + description: + "Number of days to use as the conversion attribution\ + \ window for an engagement action." + default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 5 + view_window_days: + title: "View window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a view action." 
+ default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 6 + conversion_report_time: + title: "Conversion report time" + description: + "The date by which the conversion metrics returned from\ + \ this endpoint will be reported. There are two dates associated\ + \ with a conversion event: the date that the user interacted with\ + \ the ad, and the date that the user completed a conversion event.." + default: "TIME_OF_AD_ACTION" + enum: + - "TIME_OF_AD_ACTION" + - "TIME_OF_CONVERSION" + type: "string" + order: 7 + attribution_types: + title: "Attribution types" + description: "List of types of attribution for the conversion report" + default: + - "INDIVIDUAL" + - "HOUSEHOLD" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." + enum: + - "INDIVIDUAL" + - "HOUSEHOLD" + order: 8 + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set\ + \ a date, it would be defaulted to latest allowed date by report\ + \ api (913 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + order: 9 + sourceType: + title: "pinterest" + const: "pinterest" + enum: + - "pinterest" + order: 0 + type: "string" + source-pinterest-update: + title: "Pinterest Spec" + type: "object" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set a date,\ + \ it would be defaulted to latest allowed date by api (89 days from today)." 
+ format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + status: + title: "Status" + description: + "For the ads, ad_groups, and campaigns streams, specifying\ + \ a status will filter out records that do not match the specified ones.\ + \ If a status is not specified, the source will default to records with\ + \ a status of either ACTIVE or PAUSED." + type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true + credentials: + title: "OAuth2.0" + type: "object" + required: + - "auth_method" + - "refresh_token" + - "client_id" + - "client_secret" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to obtain new Access Token, when it's expired." + airbyte_secret: true + custom_reports: + title: "Custom Reports" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." 
+ type: "array" + items: + title: "ReportConfig" + description: "Config for custom report" + type: "object" + required: + - "name" + - "level" + - "granularity" + - "columns" + properties: + name: + title: "Name" + description: "The name value of report" + type: "string" + order: 0 + level: + title: "Level" + description: "Chosen level for API" + default: "ADVERTISER" + enum: + - "ADVERTISER" + - "ADVERTISER_TARGETING" + - "CAMPAIGN" + - "CAMPAIGN_TARGETING" + - "AD_GROUP" + - "AD_GROUP_TARGETING" + - "PIN_PROMOTION" + - "PIN_PROMOTION_TARGETING" + - "KEYWORD" + - "PRODUCT_GROUP" + - "PRODUCT_GROUP_TARGETING" + - "PRODUCT_ITEM" + type: "string" + order: 1 + granularity: + title: "Granularity" + description: "Chosen granularity for API" + default: "TOTAL" + enum: + - "TOTAL" + - "DAY" + - "HOUR" + - "WEEK" + - "MONTH" + type: "string" + order: 2 + columns: + title: "Columns" + description: "A list of chosen columns" + default: [] + type: "array" + order: 3 + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "ADVERTISER_ID" + - "AD_ACCOUNT_ID" + - "AD_GROUP_ENTITY_STATUS" + - "AD_GROUP_ID" + - "AD_ID" + - "CAMPAIGN_DAILY_SPEND_CAP" + - "CAMPAIGN_ENTITY_STATUS" + - "CAMPAIGN_ID" + - "CAMPAIGN_LIFETIME_SPEND_CAP" + - "CAMPAIGN_NAME" + - "CHECKOUT_ROAS" + - "CLICKTHROUGH_1" + - "CLICKTHROUGH_1_GROSS" + - "CLICKTHROUGH_2" + - "CPC_IN_MICRO_DOLLAR" + - "CPM_IN_DOLLAR" + - "CPM_IN_MICRO_DOLLAR" + - "CTR" + - "CTR_2" + - "ECPCV_IN_DOLLAR" + - "ECPCV_P95_IN_DOLLAR" + - "ECPC_IN_DOLLAR" + - "ECPC_IN_MICRO_DOLLAR" + - "ECPE_IN_DOLLAR" + - "ECPM_IN_MICRO_DOLLAR" + - "ECPV_IN_DOLLAR" + - "ECTR" + - "EENGAGEMENT_RATE" + - "ENGAGEMENT_1" + - "ENGAGEMENT_2" + - "ENGAGEMENT_RATE" + - "IDEA_PIN_PRODUCT_TAG_VISIT_1" + - "IDEA_PIN_PRODUCT_TAG_VISIT_2" + - "IMPRESSION_1" + - "IMPRESSION_1_GROSS" + - "IMPRESSION_2" + - "INAPP_CHECKOUT_COST_PER_ACTION" + - "OUTBOUND_CLICK_1" + - "OUTBOUND_CLICK_2" + - "PAGE_VISIT_COST_PER_ACTION" + - "PAGE_VISIT_ROAS" + - "PAID_IMPRESSION" + - "PIN_ID" + - "PIN_PROMOTION_ID" + - "REPIN_1" + - "REPIN_2" + - "REPIN_RATE" + - "SPEND_IN_DOLLAR" + - "SPEND_IN_MICRO_DOLLAR" + - "TOTAL_CHECKOUT" + - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICKTHROUGH" + - "TOTAL_CLICK_ADD_TO_CART" + - "TOTAL_CLICK_CHECKOUT" + - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICK_LEAD" + - "TOTAL_CLICK_SIGNUP" + - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CONVERSIONS" + - "TOTAL_CUSTOM" + - "TOTAL_ENGAGEMENT" + - "TOTAL_ENGAGEMENT_CHECKOUT" + - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_ENGAGEMENT_LEAD" + - "TOTAL_ENGAGEMENT_SIGNUP" + - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT" + - "TOTAL_IMPRESSION_FREQUENCY" + - "TOTAL_IMPRESSION_USER" + - "TOTAL_LEAD" + - "TOTAL_OFFLINE_CHECKOUT" + - "TOTAL_PAGE_VISIT" + - "TOTAL_REPIN_RATE" + - "TOTAL_SIGNUP" + - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIDEO_3SEC_VIEWS" + - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND" + - 
"TOTAL_VIDEO_MRC_VIEWS" + - "TOTAL_VIDEO_P0_COMBINED" + - "TOTAL_VIDEO_P100_COMPLETE" + - "TOTAL_VIDEO_P25_COMBINED" + - "TOTAL_VIDEO_P50_COMBINED" + - "TOTAL_VIDEO_P75_COMBINED" + - "TOTAL_VIDEO_P95_COMBINED" + - "TOTAL_VIEW_ADD_TO_CART" + - "TOTAL_VIEW_CHECKOUT" + - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIEW_LEAD" + - "TOTAL_VIEW_SIGNUP" + - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CHECKOUT" + - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CLICK_CHECKOUT" + - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_SESSIONS" + - "TOTAL_WEB_VIEW_CHECKOUT" + - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "VIDEO_3SEC_VIEWS_2" + - "VIDEO_LENGTH" + - "VIDEO_MRC_VIEWS_2" + - "VIDEO_P0_COMBINED_2" + - "VIDEO_P100_COMPLETE_2" + - "VIDEO_P25_COMBINED_2" + - "VIDEO_P50_COMBINED_2" + - "VIDEO_P75_COMBINED_2" + - "VIDEO_P95_COMBINED_2" + - "WEB_CHECKOUT_COST_PER_ACTION" + - "WEB_CHECKOUT_ROAS" + - "WEB_SESSIONS_1" + - "WEB_SESSIONS_2" + click_window_days: + title: "Click window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a pin click action." + default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 4 + engagement_window_days: + title: "Engagement window days" + description: + "Number of days to use as the conversion attribution\ + \ window for an engagement action." + default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 5 + view_window_days: + title: "View window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a view action." 
+ default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 6 + conversion_report_time: + title: "Conversion report time" + description: + "The date by which the conversion metrics returned from\ + \ this endpoint will be reported. There are two dates associated\ + \ with a conversion event: the date that the user interacted with\ + \ the ad, and the date that the user completed a conversion event.." + default: "TIME_OF_AD_ACTION" + enum: + - "TIME_OF_AD_ACTION" + - "TIME_OF_CONVERSION" + type: "string" + order: 7 + attribution_types: + title: "Attribution types" + description: "List of types of attribution for the conversion report" + default: + - "INDIVIDUAL" + - "HOUSEHOLD" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." + enum: + - "INDIVIDUAL" + - "HOUSEHOLD" + order: 8 + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set\ + \ a date, it would be defaulted to latest allowed date by report\ + \ api (913 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + order: 9 + source-spacex-api: + type: "object" + required: + - "sourceType" + properties: + id: + type: "string" + title: "Unique ID for specific source target" + desciption: "Optional, For a specific ID" + order: 0 + options: + type: "string" + title: "Configuration options for endpoints" + desciption: + "Optional, Possible values for an endpoint. 
Example values for\ + \ launches-latest, upcoming, past" + order: 1 + sourceType: + title: "spacex-api" + const: "spacex-api" + enum: + - "spacex-api" + order: 0 + type: "string" + source-spacex-api-update: + type: "object" + required: [] + properties: + id: + type: "string" + title: "Unique ID for specific source target" + desciption: "Optional, For a specific ID" + order: 0 + options: + type: "string" + title: "Configuration options for endpoints" + desciption: + "Optional, Possible values for an endpoint. Example values for\ + \ launches-latest, upcoming, past" + order: 1 + source-bamboo-hr: + title: "Bamboo HR Spec" + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + description: "Api key of bamboo hr" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + order: 1 + title: "subdomain" + description: "Sub Domain of bamboo hr" + custom_reports_fields: + type: "string" + order: 2 + title: "custom_reports_fields" + description: "Comma-separated list of fields to include in custom reports." + custom_reports_include_default_fields: + title: "custom_reports_include_default_fields" + description: + "If true, the custom reports endpoint will include the default\ + \ fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names." 
+ type: "boolean" + default: true + order: 3 + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "bamboo-hr" + const: "bamboo-hr" + enum: + - "bamboo-hr" + order: 0 + type: "string" + source-bamboo-hr-update: + title: "Bamboo HR Spec" + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + description: "Api key of bamboo hr" + airbyte_secret: true + subdomain: + type: "string" + order: 1 + title: "subdomain" + description: "Sub Domain of bamboo hr" + custom_reports_fields: + type: "string" + order: 2 + title: "custom_reports_fields" + description: "Comma-separated list of fields to include in custom reports." + custom_reports_include_default_fields: + title: "custom_reports_include_default_fields" + description: + "If true, the custom reports endpoint will include the default\ + \ fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names." 
+ type: "boolean" + default: true + order: 3 + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-freshchat: + type: "object" + required: + - "account_name" + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + account_name: + type: "string" + description: "The unique account name for your Freshchat instance" + name: "account_name" + order: 0 + title: "Account Name" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "freshchat" + const: "freshchat" + enum: + - "freshchat" + order: 0 + type: "string" + source-freshchat-update: + type: "object" + required: + - "account_name" + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + account_name: + type: "string" + description: "The unique account name for your Freshchat instance" + name: "account_name" + order: 0 + title: "Account Name" + airbyte_secret: false + source-okta: + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "OAuth 2.0 with private key" + required: + - "auth_type" + - "client_id" + - "key_id" + - "private_key" + - "scope" + properties: + auth_type: + type: "string" + const: "oauth2.0_private_key" + order: 0 + enum: + - "oauth2.0_private_key" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + key_id: + type: "string" + title: "Key ID" + description: "The key ID (kid)." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + private_key: + type: "string" + title: "Private key" + description: "The private key in PEM format" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + scope: + type: "string" + title: "Scope" + description: "The OAuth scope." + order: 4 + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: + "An Okta token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + domain: + type: "string" + title: "Okta domain" + description: + "The Okta domain. See the docs for instructions on how to find it." 
+ airbyte_secret: false + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any\ + \ data before this date will not be replicated." + examples: + - "2022-07-22T00:00:00Z" + order: 2 + sourceType: + title: "okta" + const: "okta" + enum: + - "okta" + order: 0 + type: "string" + source-okta-update: + type: "object" + required: [] + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." + airbyte_secret: true + - type: "object" + title: "OAuth 2.0 with private key" + required: + - "auth_type" + - "client_id" + - "key_id" + - "private_key" + - "scope" + properties: + auth_type: + type: "string" + const: "oauth2.0_private_key" + order: 0 + enum: + - "oauth2.0_private_key" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + order: 1 + key_id: + type: "string" + title: "Key ID" + description: "The key ID (kid)." 
+ airbyte_secret: true + order: 2 + private_key: + type: "string" + title: "Private key" + description: "The private key in PEM format" + airbyte_secret: true + order: 3 + scope: + type: "string" + title: "Scope" + description: "The OAuth scope." + order: 4 + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: + "An Okta token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Okta domain" + description: + "The Okta domain. See the docs for instructions on how to find it." + airbyte_secret: false + order: 1 + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2022-07-22T00:00:00Z" + order: 2 + source-hibob: + type: "object" + required: + - "username" + - "is_sandbox" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + is_sandbox: + type: "boolean" + description: "Toggle true if this instance is a HiBob sandbox " + order: 2 + title: "Is Sandbox" + sourceType: + title: "hibob" + const: "hibob" + enum: + - "hibob" + order: 0 + type: "string" + source-hibob-update: + type: "object" + required: + - "username" + - "is_sandbox" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + is_sandbox: + type: "boolean" + description: "Toggle true if this instance is a HiBob sandbox " + order: 2 + title: "Is Sandbox" + source-mixpanel: + title: "Source Mixpanel Spec" + required: + - "credentials" + - "sourceType" + type: "object" + properties: + credentials: + title: "Authentication *" + description: "Choose how to authenticate to Mixpanel" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "Service Account" + required: + - "username" + - "secret" + - "project_id" + properties: + option_title: + type: "string" + const: "Service Account" + order: 0 + enum: + - "Service Account" + username: + order: 1 + title: "Username" + type: "string" + description: + "Mixpanel Service Account Username. See the docs\ + \ for more information on how to obtain this." + secret: + order: 2 + title: "Secret" + type: "string" + description: + "Mixpanel Service Account Secret. See the docs\ + \ for more information on how to obtain this." + airbyte_secret: true + x-speakeasy-param-sensitive: true + project_id: + order: 3 + title: "Project ID" + description: + "Your project ID number. 
See the docs for more information on how to obtain this." + type: "integer" + - type: "object" + title: "Project Secret" + required: + - "api_secret" + properties: + option_title: + type: "string" + const: "Project Secret" + order: 0 + enum: + - "Project Secret" + api_secret: + order: 1 + title: "Project Secret" + type: "string" + description: + "Mixpanel project secret. See the docs for more information on how to obtain this." + airbyte_secret: true + x-speakeasy-param-sensitive: true + attribution_window: + order: 2 + title: "Attribution Window" + type: "integer" + description: + "A period of time for attributing results to ads and the lookback\ + \ period after those actions occur during which ad results are counted.\ + \ Default attribution window is 5 days. (This value should be non-negative\ + \ integer)" + default: 5 + project_timezone: + order: 3 + title: "Project Timezone" + type: "string" + description: + "Time zone in which integer date times are stored. The project\ + \ timezone may be found in the project settings in the Mixpanel console." + default: "US/Pacific" + examples: + - "US/Pacific" + - "UTC" + select_properties_by_default: + order: 4 + title: "Select Properties By Default" + type: "boolean" + description: + "Setting this config parameter to TRUE ensures that new properties\ + \ on events and engage records are captured. Otherwise new properties\ + \ will be ignored." + default: true + start_date: + order: 5 + title: "Start Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If this option is not set, the connector will\ + \ replicate data from up to one year ago by default." + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + end_date: + order: 6 + title: "End Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
Left empty to always sync to most recent date" + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + region: + order: 7 + title: "Region" + description: "The region of mixpanel domain instance either US or EU." + type: "string" + enum: + - "US" + - "EU" + default: "US" + date_window_size: + order: 8 + title: "Date slicing window" + description: + "Defines window size in days, that used to slice through data.\ + \ You can reduce it, if amount of data in each window is too big for your\ + \ environment. (This value should be positive integer)" + type: "integer" + minimum: 1 + default: 30 + page_size: + order: 9 + title: "Page Size" + description: + "The number of records to fetch per request for the engage\ + \ stream. Default is 1000. If you are experiencing long sync times with\ + \ this stream, try increasing this value." + type: "integer" + minimum: 1 + default: 1000 + sourceType: + title: "mixpanel" + const: "mixpanel" + enum: + - "mixpanel" + order: 0 + type: "string" + source-mixpanel-update: + title: "Source Mixpanel Spec" + required: + - "credentials" + type: "object" + properties: + credentials: + title: "Authentication *" + description: "Choose how to authenticate to Mixpanel" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "Service Account" + required: + - "username" + - "secret" + - "project_id" + properties: + option_title: + type: "string" + const: "Service Account" + order: 0 + enum: + - "Service Account" + username: + order: 1 + title: "Username" + type: "string" + description: + "Mixpanel Service Account Username. See the docs\ + \ for more information on how to obtain this." + secret: + order: 2 + title: "Secret" + type: "string" + description: + "Mixpanel Service Account Secret. See the docs\ + \ for more information on how to obtain this." + airbyte_secret: true + project_id: + order: 3 + title: "Project ID" + description: + "Your project ID number. 
See the docs for more information on how to obtain this." + type: "integer" + - type: "object" + title: "Project Secret" + required: + - "api_secret" + properties: + option_title: + type: "string" + const: "Project Secret" + order: 0 + enum: + - "Project Secret" + api_secret: + order: 1 + title: "Project Secret" + type: "string" + description: + "Mixpanel project secret. See the docs for more information on how to obtain this." + airbyte_secret: true + attribution_window: + order: 2 + title: "Attribution Window" + type: "integer" + description: + "A period of time for attributing results to ads and the lookback\ + \ period after those actions occur during which ad results are counted.\ + \ Default attribution window is 5 days. (This value should be non-negative\ + \ integer)" + default: 5 + project_timezone: + order: 3 + title: "Project Timezone" + type: "string" + description: + "Time zone in which integer date times are stored. The project\ + \ timezone may be found in the project settings in the Mixpanel console." + default: "US/Pacific" + examples: + - "US/Pacific" + - "UTC" + select_properties_by_default: + order: 4 + title: "Select Properties By Default" + type: "boolean" + description: + "Setting this config parameter to TRUE ensures that new properties\ + \ on events and engage records are captured. Otherwise new properties\ + \ will be ignored." + default: true + start_date: + order: 5 + title: "Start Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If this option is not set, the connector will\ + \ replicate data from up to one year ago by default." + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + end_date: + order: 6 + title: "End Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
Left empty to always sync to most recent date" + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + region: + order: 7 + title: "Region" + description: "The region of mixpanel domain instance either US or EU." + type: "string" + enum: + - "US" + - "EU" + default: "US" + date_window_size: + order: 8 + title: "Date slicing window" + description: + "Defines window size in days, that used to slice through data.\ + \ You can reduce it, if amount of data in each window is too big for your\ + \ environment. (This value should be positive integer)" + type: "integer" + minimum: 1 + default: 30 + page_size: + order: 9 + title: "Page Size" + description: + "The number of records to fetch per request for the engage\ + \ stream. Default is 1000. If you are experiencing long sync times with\ + \ this stream, try increasing this value." + type: "integer" + minimum: 1 + default: 1000 + source-ip2whois: + type: "object" + required: + - "sourceType" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your API Key. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + domain: + type: "string" + title: "Domain" + description: + "Domain name. See here." + examples: + - "www.google.com" + - "www.facebook.com" + order: 1 + sourceType: + title: "ip2whois" + const: "ip2whois" + enum: + - "ip2whois" + order: 0 + type: "string" + source-ip2whois-update: + type: "object" + required: [] + properties: + api_key: + type: "string" + title: "API key" + description: + "Your API Key. See here." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Domain" + description: + "Domain name. See here." + examples: + - "www.google.com" + - "www.facebook.com" + order: 1 + source-twitter: + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + description: + "App only Bearer Token. 
See the docs for more information on how to obtain this token." + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + query: + type: "string" + description: + "Query for matching Tweets. You can learn how to build this\ + \ query by reading build a query guide ." + title: "Search Query" + order: 1 + start_date: + type: "string" + description: + "The start date for retrieving tweets cannot be more than 7\ + \ days in the past." + title: "Start Date" + format: "date-time" + order: 2 + end_date: + type: "string" + description: + "The end date for retrieving tweets must be a minimum of 10\ + \ seconds prior to the request time." + title: "End Date" + format: "date-time" + order: 3 + sourceType: + title: "twitter" + const: "twitter" + enum: + - "twitter" + order: 0 + type: "string" + source-twitter-update: + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + description: + "App only Bearer Token. See the docs for more information on how to obtain this token." + title: "Access Token" + airbyte_secret: true + order: 0 + query: + type: "string" + description: + "Query for matching Tweets. You can learn how to build this\ + \ query by reading build a query guide ." + title: "Search Query" + order: 1 + start_date: + type: "string" + description: + "The start date for retrieving tweets cannot be more than 7\ + \ days in the past." + title: "Start Date" + format: "date-time" + order: 2 + end_date: + type: "string" + description: + "The end date for retrieving tweets must be a minimum of 10\ + \ seconds prior to the request time." + title: "End Date" + format: "date-time" + order: 3 + source-sftp-bulk: + title: "SFTP Bulk Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the 
`hi_res`\ + \ mode. This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + host: + title: "Host Address" + description: "The server host address" + examples: + - "www.host.com" + - "192.0.2.1" + order: 2 + type: "string" + username: + title: "User Name" + description: "The server user" + order: 3 + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the SFTP Server" + type: "object" + order: 4 + oneOf: + - title: "Authenticate via Password" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "password" + const: "password" + enum: + - "password" + type: "string" + password: + title: "Password" + description: "Password" + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + - "auth_type" + - title: "Authenticate via Private Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "private_key" + const: "private_key" + enum: + - "private_key" + type: "string" + private_key: + title: "Private key" + description: "The Private key" 
+ multiline: true + order: 4 + type: "string" + required: + - "private_key" + - "auth_type" + port: + title: "Host Address" + description: "The server port" + default: 22 + examples: + - "22" + order: 5 + type: "integer" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + default: "/" + examples: + - "/logs/2022" + order: 6 + pattern_descriptor: "/folder_to_sync" + type: "string" + sourceType: + title: "sftp-bulk" + const: "sftp-bulk" + enum: + - "sftp-bulk" + order: 0 + type: "string" + required: + - "streams" + - "host" + - "username" + - "credentials" + - "sourceType" + source-sftp-bulk-update: + title: "SFTP Bulk Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. 
For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." 
+ default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the `hi_res`\ + \ mode. 
This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + host: + title: "Host Address" + description: "The server host address" + examples: + - "www.host.com" + - "192.0.2.1" + order: 2 + type: "string" + username: + title: "User Name" + description: "The server user" + order: 3 + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the SFTP Server" + type: "object" + order: 4 + oneOf: + - title: "Authenticate via Password" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "password" + const: "password" + enum: + - "password" + type: "string" + password: + title: "Password" + description: "Password" + airbyte_secret: true + order: 3 + type: "string" + required: + - "password" + - "auth_type" + - title: "Authenticate via Private Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "private_key" + const: "private_key" + enum: + - "private_key" + type: "string" + private_key: + title: "Private key" + description: "The Private key" + multiline: true + order: 4 + type: "string" + 
required: + - "private_key" + - "auth_type" + port: + title: "Host Address" + description: "The server port" + default: 22 + examples: + - "22" + order: 5 + type: "integer" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + default: "/" + examples: + - "/logs/2022" + order: 6 + pattern_descriptor: "/folder_to_sync" + type: "string" + required: + - "streams" + - "host" + - "username" + - "credentials" + source-zendesk-support: + title: "Source Zendesk Support Spec" + type: "object" + required: + - "subdomain" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The UTC date and time from which you'd like to replicate data,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2020-10-15T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + format: "date-time" + order: 2 + subdomain: + type: "string" + title: "Subdomain" + description: + "This is your unique Zendesk subdomain that can be found in\ + \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ + \ MY_SUBDOMAIN is the value of your subdomain." + order: 0 + credentials: + title: "Authentication" + type: "object" + description: + "Zendesk allows two authentication methods. We recommend using\ + \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ + \ Source users." + order: 1 + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The OAuth access token. See the Zendesk docs for more information on generating this token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See our full documentation for more information on generating this\ + \ token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zendesk-support" + const: "zendesk-support" + enum: + - "zendesk-support" + order: 0 + type: "string" + source-zendesk-support-update: + title: "Source Zendesk Support Spec" + type: "object" + required: + - "subdomain" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The UTC date and time from which you'd like to replicate data,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2020-10-15T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + format: "date-time" + order: 2 + subdomain: + type: "string" + title: "Subdomain" + description: + "This is your unique Zendesk subdomain that can be found in\ + \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ + \ MY_SUBDOMAIN is the value of your subdomain." 
+ order: 0 + credentials: + title: "Authentication" + type: "object" + description: + "Zendesk allows two authentication methods. We recommend using\ + \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ + \ Source users." + order: 1 + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The OAuth access token. See the Zendesk docs for more information on generating this token." + airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See our full documentation for more information on generating this\ + \ token." + airbyte_secret: true + source-microsoft-onedrive: + title: "Microsoft OneDrive Source Spec" + description: + "SourceMicrosoftOneDriveSpec class for Microsoft OneDrive Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the OneDrive API." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + drive_name: + title: "Drive Name" + description: "Name of the Microsoft OneDrive drive where the file(s) exist." + default: "OneDrive" + order: 2 + type: "string" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS'\ + \ for shared items the user has access to, and 'ALL' to search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." + order: 4 + type: "string" + sourceType: + title: "microsoft-onedrive" + const: "microsoft-onedrive" + enum: + - "microsoft-onedrive" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "sourceType" + source-microsoft-onedrive-update: + title: "Microsoft OneDrive Source Spec" + description: + "SourceMicrosoftOneDriveSpec class for Microsoft OneDrive Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the OneDrive API." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + drive_name: + title: "Drive Name" + description: "Name of the Microsoft OneDrive drive where the file(s) exist." + default: "OneDrive" + order: 2 + type: "string" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS'\ + \ for shared items the user has access to, and 'ALL' to search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." 
+ order: 4 + type: "string" + required: + - "streams" + - "credentials" + source-appfigures: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + search_store: + type: "string" + description: "The store which needs to be searched in streams" + title: "Search Store" + default: "apple" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + group_by: + type: "string" + description: "Category term for grouping the search results" + title: "Group by" + default: "product" + enum: + - "network" + - "product" + - "country" + - "date" + order: 3 + sourceType: + title: "appfigures" + const: "appfigures" + enum: + - "appfigures" + order: 0 + type: "string" + source-appfigures-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + search_store: + type: "string" + description: "The store which needs to be searched in streams" + title: "Search Store" + default: "apple" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + group_by: + type: "string" + description: "Category term for grouping the search results" + title: "Group by" + default: "product" + enum: + - "network" + - "product" + - "country" + - "date" + order: 3 + source-tiktok-marketing: + title: "TikTok Marketing Source Spec" + type: "object" + properties: + credentials: + title: "Authentication Method" + description: "Authentication method" + default: {} + order: 0 + type: "object" + oneOf: + - title: "OAuth2.0" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: 
+ - "oauth2.0" + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + description: "Long-term Authorized Access Token." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID to filter reports and streams. Let\ + \ this empty to retrieve all." + type: "string" + required: + - "app_id" + - "secret" + - "access_token" + - title: "Sandbox Access Token" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "sandbox_access_token" + order: 0 + type: "string" + enum: + - "sandbox_access_token" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID which generated for the developer's\ + \ Sandbox application." + type: "string" + access_token: + title: "Access Token" + description: "The long-term authorized access token." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "advertiser_id" + - "access_token" + start_date: + title: "Replication Start Date" + description: + "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + format: "date" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DD. All data generated between\ + \ start_date and this date will be replicated. Not setting this option\ + \ will result in always syncing the data till the current date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + type: "string" + format: "date" + attribution_window: + title: "Attribution Window" + description: "The attribution window in days." + minimum: 0 + maximum: 364 + default: 3 + order: 3 + type: "integer" + include_deleted: + title: + "Include Deleted Data in Reports and Ads, Ad Groups and Campaign\ + \ streams." + description: + "Set to active if you want to include deleted data in report\ + \ based streams and Ads, Ad Groups and Campaign streams." + default: false + order: 4 + type: "boolean" + sourceType: + title: "tiktok-marketing" + const: "tiktok-marketing" + enum: + - "tiktok-marketing" + order: 0 + type: "string" + source-tiktok-marketing-update: + title: "TikTok Marketing Source Spec" + type: "object" + properties: + credentials: + title: "Authentication Method" + description: "Authentication method" + default: {} + order: 0 + type: "object" + oneOf: + - title: "OAuth2.0" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + access_token: + title: "Access Token" + description: "Long-term Authorized Access Token." + airbyte_secret: true + type: "string" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID to filter reports and streams. Let\ + \ this empty to retrieve all." 
+ type: "string" + required: + - "app_id" + - "secret" + - "access_token" + - title: "Sandbox Access Token" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "sandbox_access_token" + order: 0 + type: "string" + enum: + - "sandbox_access_token" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID which generated for the developer's\ + \ Sandbox application." + type: "string" + access_token: + title: "Access Token" + description: "The long-term authorized access token." + airbyte_secret: true + type: "string" + required: + - "advertiser_id" + - "access_token" + start_date: + title: "Replication Start Date" + description: + "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + format: "date" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DD. All data generated between\ + \ start_date and this date will be replicated. Not setting this option\ + \ will result in always syncing the data till the current date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + type: "string" + format: "date" + attribution_window: + title: "Attribution Window" + description: "The attribution window in days." + minimum: 0 + maximum: 364 + default: 3 + order: 3 + type: "integer" + include_deleted: + title: + "Include Deleted Data in Reports and Ads, Ad Groups and Campaign\ + \ streams." + description: + "Set to active if you want to include deleted data in report\ + \ based streams and Ads, Ad Groups and Campaign streams." 
+ default: false + order: 4 + type: "boolean" + source-aws-cloudtrail: + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + - "sourceType" + properties: + aws_key_id: + type: "string" + title: "Key ID" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_secret_key: + type: "string" + title: "Secret Key" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_region_name: + type: "string" + title: "Region Name" + description: + "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + default: "us-east-1" + start_date: + type: "string" + title: "Start Date" + description: + "The date you would like to replicate data. Data in AWS CloudTrail\ + \ is available for last 90 days only. Format: YYYY-MM-DD." 
+ examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + lookup_attributes_filter: + title: + "Filter applied while fetching records based on AttributeKey and\ + \ AttributeValue which will be appended on the request body" + type: "object" + required: + - "attribute_key" + - "attribute_value" + properties: + attribute_key: + type: "string" + title: "Attribute Key from the response to filter" + examples: + - "EventName" + default: "EventName" + attribute_value: + type: "string" + title: "Corresponding value to the given attribute key" + examples: + - "ListInstanceAssociations" + - "ConsoleLogin" + default: "ListInstanceAssociations" + sourceType: + title: "aws-cloudtrail" + const: "aws-cloudtrail" + enum: + - "aws-cloudtrail" + order: 0 + type: "string" + source-aws-cloudtrail-update: + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + properties: + aws_key_id: + type: "string" + title: "Key ID" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + aws_secret_key: + type: "string" + title: "Secret Key" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + aws_region_name: + type: "string" + title: "Region Name" + description: + "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + default: "us-east-1" + start_date: + type: "string" + title: "Start Date" + description: + "The date you would like to replicate data. Data in AWS CloudTrail\ + \ is available for last 90 days only. Format: YYYY-MM-DD." 
+ examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + lookup_attributes_filter: + title: + "Filter applied while fetching records based on AttributeKey and\ + \ AttributeValue which will be appended on the request body" + type: "object" + required: + - "attribute_key" + - "attribute_value" + properties: + attribute_key: + type: "string" + title: "Attribute Key from the response to filter" + examples: + - "EventName" + default: "EventName" + attribute_value: + type: "string" + title: "Corresponding value to the given attribute key" + examples: + - "ListInstanceAssociations" + - "ConsoleLogin" + default: "ListInstanceAssociations" + source-jira: + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + - "sourceType" + properties: + api_token: + type: "string" + title: "API Token" + description: + "Jira API Token. See the docs for more information on how to generate this key. API Token\ + \ is used for Authorization to your account by BasicAuth." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + domain: + type: "string" + title: "Domain" + examples: + - ".atlassian.net" + - ".jira.com" + - "jira..com" + description: + "The Domain for your Jira account, e.g. airbyteio.atlassian.net,\ + \ airbyteio.jira.com, jira.your-domain.com" + order: 1 + email: + type: "string" + title: "Email" + description: + "The user email for your Jira account which you used to generate\ + \ the API token. This field is used for Authorization to your account\ + \ by BasicAuth." + order: 2 + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: + "List of Jira project keys to replicate data for, or leave\ + \ it empty if you want to replicate data for all projects." 
+ order: 3 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you want to replicate data from Jira,\ + \ use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies\ + \ to certain streams, and only data generated on or after the start date\ + \ will be replicated. Or leave it empty if you want to replicate all data.\ + \ For more information, refer to the documentation." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 4 + lookback_window_minutes: + title: "Lookback window" + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N minutes. By default, updated objects that\ + \ are not newly created are not incrementally synced." + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 5 + enable_experimental_streams: + type: "boolean" + title: "Enable Experimental Streams" + description: + "Allow the use of experimental streams which rely on undocumented\ + \ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\ + \ for more info." + default: false + order: 6 + sourceType: + title: "jira" + const: "jira" + enum: + - "jira" + order: 0 + type: "string" + source-jira-update: + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + properties: + api_token: + type: "string" + title: "API Token" + description: + "Jira API Token. See the docs for more information on how to generate this key. API Token\ + \ is used for Authorization to your account by BasicAuth." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Domain" + examples: + - ".atlassian.net" + - ".jira.com" + - "jira..com" + description: + "The Domain for your Jira account, e.g. 
airbyteio.atlassian.net,\ + \ airbyteio.jira.com, jira.your-domain.com" + order: 1 + email: + type: "string" + title: "Email" + description: + "The user email for your Jira account which you used to generate\ + \ the API token. This field is used for Authorization to your account\ + \ by BasicAuth." + order: 2 + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: + "List of Jira project keys to replicate data for, or leave\ + \ it empty if you want to replicate data for all projects." + order: 3 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you want to replicate data from Jira,\ + \ use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies\ + \ to certain streams, and only data generated on or after the start date\ + \ will be replicated. Or leave it empty if you want to replicate all data.\ + \ For more information, refer to the documentation." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 4 + lookback_window_minutes: + title: "Lookback window" + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N minutes. By default, updated objects that\ + \ are not newly created are not incrementally synced." + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 5 + enable_experimental_streams: + type: "boolean" + title: "Enable Experimental Streams" + description: + "Allow the use of experimental streams which rely on undocumented\ + \ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\ + \ for more info." 
+ default: false + order: 6 + source-hubspot: + title: "HubSpot Source Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If not set, \"2006-06-01T00:00:00Z\"\ + \ (Hubspot creation date) will be used as start date. It's recommended\ + \ to provide relevant to your data start date value to optimize synchronization." + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + credentials: + title: "Authentication" + description: "Choose how to authenticate to HubSpot." + type: "object" + oneOf: + - type: "object" + title: "OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Refresh token to renew an expired access token. See\ + \ the Hubspot docs if you need help finding this token." 
+ type: "string" + examples: + - "refresh_token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Private App" + required: + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials set" + const: "Private App Credentials" + order: 0 + enum: + - "Private App Credentials" + access_token: + title: "Access token" + description: + "HubSpot Access token. See the Hubspot docs if you need help finding this token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + enable_experimental_streams: + title: "Enable experimental streams" + description: + "If enabled then experimental streams become available for\ + \ sync." + type: "boolean" + default: false + sourceType: + title: "hubspot" + const: "hubspot" + enum: + - "hubspot" + order: 0 + type: "string" + source-hubspot-update: + title: "HubSpot Source Spec" + type: "object" + required: + - "credentials" + properties: + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If not set, \"2006-06-01T00:00:00Z\"\ + \ (Hubspot creation date) will be used as start date. It's recommended\ + \ to provide relevant to your data start date value to optimize synchronization." + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + credentials: + title: "Authentication" + description: "Choose how to authenticate to HubSpot." 
+ type: "object" + oneOf: + - type: "object" + title: "OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + description: + "Refresh token to renew an expired access token. See\ + \ the Hubspot docs if you need help finding this token." + type: "string" + examples: + - "refresh_token" + airbyte_secret: true + - type: "object" + title: "Private App" + required: + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials set" + const: "Private App Credentials" + order: 0 + enum: + - "Private App Credentials" + access_token: + title: "Access token" + description: + "HubSpot Access token. See the Hubspot docs if you need help finding this token." + type: "string" + airbyte_secret: true + enable_experimental_streams: + title: "Enable experimental streams" + description: + "If enabled then experimental streams become available for\ + \ sync." 
+ type: "boolean" + default: false + source-rss: + title: "RSS Spec" + type: "object" + required: + - "url" + - "sourceType" + properties: + url: + type: "string" + description: "RSS Feed URL" + sourceType: + title: "rss" + const: "rss" + enum: + - "rss" + order: 0 + type: "string" + source-rss-update: + title: "RSS Spec" + type: "object" + required: + - "url" + properties: + url: + type: "string" + description: "RSS Feed URL" + source-sap-fieldglass: + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "sap-fieldglass" + const: "sap-fieldglass" + enum: + - "sap-fieldglass" + order: 0 + type: "string" + source-sap-fieldglass-update: + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + source-twilio-taskrouter: + type: "object" + required: + - "account_sid" + - "auth_token" + - "sourceType" + properties: + account_sid: + type: "string" + description: "Twilio Account ID" + title: "Account SID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + auth_token: + type: "string" + description: "Twilio Auth Token" + airbyte_secret: true + title: "Auth Token" + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "twilio-taskrouter" + const: "twilio-taskrouter" + enum: + - "twilio-taskrouter" + order: 0 + type: "string" + source-twilio-taskrouter-update: + type: "object" + required: + - "account_sid" + - "auth_token" + properties: + account_sid: + type: "string" + description: "Twilio Account ID" + title: "Account SID" + airbyte_secret: true + order: 0 + auth_token: + type: "string" + description: "Twilio Auth Token" + airbyte_secret: true + title: "Auth Token" + order: 1 + source-xkcd: + type: "object" + properties: + comic_number: + type: 
"string" + title: "comic_number" + description: + "Specifies the comic number in which details are to be extracted,\ + \ pagination will begin with that number to end of available comics" + default: "2960" + order: 0 + sourceType: + title: "xkcd" + const: "xkcd" + enum: + - "xkcd" + order: 0 + type: "string" + source-xkcd-update: + type: "object" + properties: + comic_number: + type: "string" + title: "comic_number" + description: + "Specifies the comic number in which details are to be extracted,\ + \ pagination will begin with that number to end of available comics" + default: "2960" + order: 0 + source-zenloop: + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + x-speakeasy-param-sensitive: true + date_from: + type: "string" + description: + "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: + "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + x-speakeasy-param-sensitive: true + survey_group_id: + type: "string" + description: + "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zenloop" + const: "zenloop" + enum: + - "zenloop" + order: 0 + type: "string" + source-zenloop-update: + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + date_from: + type: "string" + description: + "Zenloop date_from. 
Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: + "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + survey_group_id: + type: "string" + description: + "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + source-tempo: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + title: "API token" + description: + "Tempo API Token. Go to Tempo>Settings, scroll down to Data\ + \ Access and select API integration." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "tempo" + const: "tempo" + enum: + - "tempo" + order: 0 + type: "string" + source-tempo-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + title: "API token" + description: + "Tempo API Token. Go to Tempo>Settings, scroll down to Data\ + \ Access and select API integration." + airbyte_secret: true + order: 0 + source-chargebee: + title: "Chargebee Spec" + type: "object" + required: + - "site" + - "site_api_key" + - "start_date" + - "sourceType" + properties: + site_api_key: + type: "string" + title: "API Key" + description: + "Chargebee API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + site: + type: "string" + title: "Site" + description: "The site prefix for your Chargebee instance." + examples: + - "airbyte-test" + order: 1 + start_date: + type: "string" + format: "date-time" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000Z.\ + \ Any data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-25T00:00:00Z" + order: 2 + product_catalog: + type: "string" + title: "Product Catalog" + description: + "Product Catalog version of your Chargebee site. Instructions\ + \ on how to find your version you may find here under `API Version` section. If left blank, the product catalog\ + \ version will be set to 2.0." + enum: + - "1.0" + - "2.0" + default: "2.0" + order: 3 + sourceType: + title: "chargebee" + const: "chargebee" + enum: + - "chargebee" + order: 0 + type: "string" + source-chargebee-update: + title: "Chargebee Spec" + type: "object" + required: + - "site" + - "site_api_key" + - "start_date" + properties: + site_api_key: + type: "string" + title: "API Key" + description: + "Chargebee API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + site: + type: "string" + title: "Site" + description: "The site prefix for your Chargebee instance." + examples: + - "airbyte-test" + order: 1 + start_date: + type: "string" + format: "date-time" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000Z.\ + \ Any data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-25T00:00:00Z" + order: 2 + product_catalog: + type: "string" + title: "Product Catalog" + description: + "Product Catalog version of your Chargebee site. Instructions\ + \ on how to find your version you may find here under `API Version` section. If left blank, the product catalog\ + \ version will be set to 2.0." 
+ enum: + - "1.0" + - "2.0" + default: "2.0" + order: 3 + source-onesignal: + title: "OneSignal Source Spec" + type: "object" + required: + - "user_auth_key" + - "start_date" + - "outcome_names" + - "applications" + - "sourceType" + properties: + user_auth_key: + type: "string" + title: "User Auth Key" + description: + "OneSignal User Auth Key, see the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + applications: + type: "array" + title: "Applications" + description: + "Applications keys, see the docs for more information on how to obtain this data" + items: + type: "object" + properties: + app_name: + type: "string" + title: "OneSignal App Name" + order: 0 + app_id: + type: "string" + title: "OneSignal App ID" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + app_api_key: + type: "string" + title: "REST API Key" + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "app_id" + - "app_api_key" + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for OneSignal\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 2 + outcome_names: + type: "string" + title: "Outcome Names" + description: + "Comma-separated list of names and the value (sum/count) for\ + \ the returned outcome data. 
See the docs for more details" + examples: + - "os__session_duration.count,os__click.count,CustomOutcomeName.sum" + order: 3 + sourceType: + title: "onesignal" + const: "onesignal" + enum: + - "onesignal" + order: 0 + type: "string" + source-onesignal-update: + title: "OneSignal Source Spec" + type: "object" + required: + - "user_auth_key" + - "start_date" + - "outcome_names" + - "applications" + properties: + user_auth_key: + type: "string" + title: "User Auth Key" + description: + "OneSignal User Auth Key, see the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + applications: + type: "array" + title: "Applications" + description: + "Applications keys, see the docs for more information on how to obtain this data" + items: + type: "object" + properties: + app_name: + type: "string" + title: "OneSignal App Name" + order: 0 + app_id: + type: "string" + title: "OneSignal App ID" + order: 1 + airbyte_secret: true + app_api_key: + type: "string" + title: "REST API Key" + order: 2 + airbyte_secret: true + required: + - "app_id" + - "app_api_key" + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for OneSignal\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 2 + outcome_names: + type: "string" + title: "Outcome Names" + description: + "Comma-separated list of names and the value (sum/count) for\ + \ the returned outcome data. 
See the docs for more details" + examples: + - "os__session_duration.count,os__click.count,CustomOutcomeName.sum" + order: 3 + source-google-analytics-data-api: + title: "Google Analytics (Data API) Spec" + type: "object" + required: + - "property_ids" + - "sourceType" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Google Analytics developer\ + \ application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining a new access token." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + credentials_json: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key linked to the service account used for\ + \ authorization. For steps on obtaining this key, refer to the setup guide." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + property_ids: + title: "Property IDs" + description: + "A list of your Property IDs. The Property ID is a unique number\ + \ assigned to each property in Google Analytics, found in your GA4 property\ + \ URL. This ID allows the connector to track the specific events associated\ + \ with your property. Refer to the Google\ + \ Analytics documentation to locate your property ID." + order: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + examples: + - - "1738294" + - "5729978930" + uniqueItems: true + date_ranges_start_date: + type: "string" + title: "Start Date" + description: + "The start date from which to replicate report data in the\ + \ format YYYY-MM-DD. Data generated before this date will not be included\ + \ in the report. Not applied to custom Cohort reports." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 4 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name." + type: "string" + order: 0 + dimensions: + title: "Dimensions" + description: "A list of dimensions." + type: "array" + items: + type: "string" + minItems: 1 + order: 1 + metrics: + title: "Metrics" + description: "A list of metrics." + type: "array" + items: + type: "string" + minItems: 1 + order: 2 + dimensionFilter: + title: "Dimensions filter" + description: "Dimensions filter" + type: "object" + order: 3 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all dimensions." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + metricFilter: + title: "Metrics filter" + description: "Metrics filter" + type: "object" + order: 4 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all metrics." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + cohortSpec: + title: "Cohort Reports" + description: + "Cohort reports creates a time series of user retention\ + \ for the cohort." + type: "object" + order: 5 + oneOf: + - title: "Disabled" + type: "object" + properties: + enabled: + type: "string" + const: "false" + enum: + - "false" + - title: "Enabled" + type: "object" + properties: + enabled: + type: "string" + const: "true" + enum: + - "true" + cohorts: + name: "Cohorts" + order: 0 + type: "array" + always_show: true + items: + title: "Cohorts" + type: "object" + required: + - "dimension" + - "dateRange" + properties: + name: + title: "Name" + type: "string" + always_show: true + pattern: "^(?!(cohort_|RESERVED_)).*$" + description: + "Assigns a name to this cohort. 
If not set,\ + \ cohorts are named by their zero based index cohort_0,\ + \ cohort_1, etc." + order: 0 + dimension: + title: "Dimension" + description: + "Dimension used by the cohort. Required and\ + \ only supports `firstSessionDate`" + type: "string" + enum: + - "firstSessionDate" + order: 1 + dateRange: + type: "object" + required: + - "startDate" + - "endDate" + properties: + startDate: + title: "Start Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + endDate: + title: "End Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 3 + cohortsRange: + type: "object" + order: 1 + required: + - "granularity" + - "endOffset" + properties: + granularity: + title: "Granularity" + description: + "The granularity used to interpret the startOffset\ + \ and endOffset for the extended reporting date range\ + \ for a cohort report." + type: "string" + enum: + - "GRANULARITY_UNSPECIFIED" + - "DAILY" + - "WEEKLY" + - "MONTHLY" + order: 0 + startOffset: + title: "Start Offset" + description: + "Specifies the start date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 1 + endOffset: + title: "End Offset" + description: + "Specifies the end date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 2 + cohortReportSettings: + type: "object" + title: "Cohort Report Settings" + description: "Optional settings for a cohort report." 
+ properties: + accumulate: + always_show: true + title: "Accumulate" + description: + "If true, accumulates the result from first\ + \ touch day to the end day" + type: "boolean" + required: + - "name" + - "dimensions" + - "metrics" + window_in_days: + type: "integer" + title: "Data Request Interval (Days)" + description: + "The interval in days for each data request made to the Google\ + \ Analytics API. A larger value speeds up data sync, but increases the\ + \ chance of data sampling, which may result in inaccuracies. We recommend\ + \ a value of 1 to minimize sampling, unless speed is an absolute priority\ + \ over accuracy. Acceptable values range from 1 to 364. Does not apply\ + \ to custom Cohort reports. More information is available in the documentation." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + minimum: 1 + maximum: 364 + default: 1 + order: 5 + lookback_window: + type: "integer" + title: "Lookback window (Days)" + description: + "Since attribution changes after the event date, and Google\ + \ Analytics has a data processing latency, we should specify how many\ + \ days in the past we should refresh the data in every run. So if you\ + \ set it at 5 days, in every sync it will fetch the last bookmark date\ + \ minus 5 days." + examples: + - 2 + - 3 + - 4 + - 7 + - 14 + - 28 + minimum: 2 + maximum: 60 + default: 2 + order: 6 + keep_empty_rows: + type: "boolean" + title: "Keep Empty Rows" + description: + "If false, each row with all metrics equal to 0 will not be\ + \ returned. If true, these rows will be returned if they are not separately\ + \ removed by a filter. More information is available in the documentation." + default: false + order: 7 + convert_conversions_event: + type: "boolean" + title: "Convert `conversions:*` Metrics to Float" + description: + "Enables conversion of `conversions:*` event metrics from integers\ + \ to floats. 
This is beneficial for preventing data rounding when the\ + \ API returns float values for any `conversions:*` fields." + default: false + order: 8 + sourceType: + title: "google-analytics-data-api" + const: "google-analytics-data-api" + enum: + - "google-analytics-data-api" + order: 0 + type: "string" + source-google-analytics-data-api-update: + title: "Google Analytics (Data API) Spec" + type: "object" + required: + - "property_ids" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Google Analytics developer\ + \ application." + airbyte_secret: true + order: 2 + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining a new access token." + airbyte_secret: true + order: 3 + access_token: + title: "Access Token" + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 4 + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + credentials_json: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key linked to the service account used for\ + \ authorization. For steps on obtaining this key, refer to the setup guide." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + order: 1 + property_ids: + title: "Property IDs" + description: + "A list of your Property IDs. The Property ID is a unique number\ + \ assigned to each property in Google Analytics, found in your GA4 property\ + \ URL. This ID allows the connector to track the specific events associated\ + \ with your property. Refer to the Google\ + \ Analytics documentation to locate your property ID." + order: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + examples: + - - "1738294" + - "5729978930" + uniqueItems: true + date_ranges_start_date: + type: "string" + title: "Start Date" + description: + "The start date from which to replicate report data in the\ + \ format YYYY-MM-DD. Data generated before this date will not be included\ + \ in the report. Not applied to custom Cohort reports." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 4 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name." + type: "string" + order: 0 + dimensions: + title: "Dimensions" + description: "A list of dimensions." + type: "array" + items: + type: "string" + minItems: 1 + order: 1 + metrics: + title: "Metrics" + description: "A list of metrics." + type: "array" + items: + type: "string" + minItems: 1 + order: 2 + dimensionFilter: + title: "Dimensions filter" + description: "Dimensions filter" + type: "object" + order: 3 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all dimensions." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + metricFilter: + title: "Metrics filter" + description: "Metrics filter" + type: "object" + order: 4 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all metrics." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + cohortSpec: + title: "Cohort Reports" + description: + "Cohort reports creates a time series of user retention\ + \ for the cohort." + type: "object" + order: 5 + oneOf: + - title: "Disabled" + type: "object" + properties: + enabled: + type: "string" + const: "false" + enum: + - "false" + - title: "Enabled" + type: "object" + properties: + enabled: + type: "string" + const: "true" + enum: + - "true" + cohorts: + name: "Cohorts" + order: 0 + type: "array" + always_show: true + items: + title: "Cohorts" + type: "object" + required: + - "dimension" + - "dateRange" + properties: + name: + title: "Name" + type: "string" + always_show: true + pattern: "^(?!(cohort_|RESERVED_)).*$" + description: + "Assigns a name to this cohort. 
If not set,\ + \ cohorts are named by their zero based index cohort_0,\ + \ cohort_1, etc." + order: 0 + dimension: + title: "Dimension" + description: + "Dimension used by the cohort. Required and\ + \ only supports `firstSessionDate`" + type: "string" + enum: + - "firstSessionDate" + order: 1 + dateRange: + type: "object" + required: + - "startDate" + - "endDate" + properties: + startDate: + title: "Start Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + endDate: + title: "End Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 3 + cohortsRange: + type: "object" + order: 1 + required: + - "granularity" + - "endOffset" + properties: + granularity: + title: "Granularity" + description: + "The granularity used to interpret the startOffset\ + \ and endOffset for the extended reporting date range\ + \ for a cohort report." + type: "string" + enum: + - "GRANULARITY_UNSPECIFIED" + - "DAILY" + - "WEEKLY" + - "MONTHLY" + order: 0 + startOffset: + title: "Start Offset" + description: + "Specifies the start date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 1 + endOffset: + title: "End Offset" + description: + "Specifies the end date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 2 + cohortReportSettings: + type: "object" + title: "Cohort Report Settings" + description: "Optional settings for a cohort report." 
+ properties: + accumulate: + always_show: true + title: "Accumulate" + description: + "If true, accumulates the result from first\ + \ touch day to the end day" + type: "boolean" + required: + - "name" + - "dimensions" + - "metrics" + window_in_days: + type: "integer" + title: "Data Request Interval (Days)" + description: + "The interval in days for each data request made to the Google\ + \ Analytics API. A larger value speeds up data sync, but increases the\ + \ chance of data sampling, which may result in inaccuracies. We recommend\ + \ a value of 1 to minimize sampling, unless speed is an absolute priority\ + \ over accuracy. Acceptable values range from 1 to 364. Does not apply\ + \ to custom Cohort reports. More information is available in the documentation." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + minimum: 1 + maximum: 364 + default: 1 + order: 5 + lookback_window: + type: "integer" + title: "Lookback window (Days)" + description: + "Since attribution changes after the event date, and Google\ + \ Analytics has a data processing latency, we should specify how many\ + \ days in the past we should refresh the data in every run. So if you\ + \ set it at 5 days, in every sync it will fetch the last bookmark date\ + \ minus 5 days." + examples: + - 2 + - 3 + - 4 + - 7 + - 14 + - 28 + minimum: 2 + maximum: 60 + default: 2 + order: 6 + keep_empty_rows: + type: "boolean" + title: "Keep Empty Rows" + description: + "If false, each row with all metrics equal to 0 will not be\ + \ returned. If true, these rows will be returned if they are not separately\ + \ removed by a filter. More information is available in the documentation." + default: false + order: 7 + convert_conversions_event: + type: "boolean" + title: "Convert `conversions:*` Metrics to Float" + description: + "Enables conversion of `conversions:*` event metrics from integers\ + \ to floats. 
This is beneficial for preventing data rounding when the\ + \ API returns float values for any `conversions:*` fields." + default: false + order: 8 + source-mailgun: + type: "object" + required: + - "private_key" + - "sourceType" + properties: + private_key: + type: "string" + order: 0 + title: "Private API Key" + description: "Primary account API key to access your Mailgun data." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain_region: + type: "string" + order: 1 + title: "Domain Region Code" + description: + "Domain region code. 'EU' or 'US' are possible values. The\ + \ default is 'US'." + default: "US" + enum: + - "US" + - "EU" + start_date: + type: "string" + order: 2 + title: "Replication Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2023-08-01T00:00:00Z" + description: + "UTC date and time in the format 2020-10-01 00:00:00. Any data\ + \ before this date will not be replicated. If omitted, defaults to 3 days\ + \ ago." + sourceType: + title: "mailgun" + const: "mailgun" + enum: + - "mailgun" + order: 0 + type: "string" + source-mailgun-update: + type: "object" + required: + - "private_key" + properties: + private_key: + type: "string" + order: 0 + title: "Private API Key" + description: "Primary account API key to access your Mailgun data." + airbyte_secret: true + domain_region: + type: "string" + order: 1 + title: "Domain Region Code" + description: + "Domain region code. 'EU' or 'US' are possible values. The\ + \ default is 'US'." + default: "US" + enum: + - "US" + - "EU" + start_date: + type: "string" + order: 2 + title: "Replication Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2023-08-01T00:00:00Z" + description: + "UTC date and time in the format 2020-10-01 00:00:00. Any data\ + \ before this date will not be replicated. If omitted, defaults to 3 days\ + \ ago." 
+ source-intercom: + title: "Source Intercom Spec" + type: "object" + required: + - "start_date" + - "access_token" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + title: "Access token" + type: "string" + description: + "Access token for making authenticated requests. See the Intercom docs for more information." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + activity_logs_time_step: + type: "integer" + default: 30 + minimum: 1 + maximum: 91 + title: "Activity logs stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of Activity\ + \ Logs stream." + examples: + - 30 + - 10 + - 5 + order: 3 + sourceType: + title: "intercom" + const: "intercom" + enum: + - "intercom" + order: 0 + type: "string" + source-intercom-update: + title: "Source Intercom Spec" + type: "object" + required: + - "start_date" + - "access_token" + properties: + start_date: + type: "string" + title: "Start date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + title: "Access token" + type: "string" + description: + "Access token for making authenticated requests. See the Intercom docs for more information." + airbyte_secret: true + order: 0 + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + activity_logs_time_step: + type: "integer" + default: 30 + minimum: 1 + maximum: 91 + title: "Activity logs stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of Activity\ + \ Logs stream." + examples: + - 30 + - 10 + - 5 + order: 3 + source-rki-covid: + title: "RKI Covid Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format 2017-01-25. Any data before this date\ + \ will not be replicated." + order: 1 + sourceType: + title: "rki-covid" + const: "rki-covid" + enum: + - "rki-covid" + order: 0 + type: "string" + source-rki-covid-update: + title: "RKI Covid Spec" + type: "object" + required: + - "start_date" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format 2017-01-25. Any data before this date\ + \ will not be replicated." + order: 1 + source-secoda: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Api Key" + airbyte_secret: true + description: + "Your API Access Key. See here. The key is case sensitive." 
+ order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "secoda" + const: "secoda" + enum: + - "secoda" + order: 0 + type: "string" + source-secoda-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "Api Key" + airbyte_secret: true + description: + "Your API Access Key. See here. The key is case sensitive." + order: 0 + source-zoom: + title: "Zoom Spec" + type: "object" + required: + - "account_id" + - "client_id" + - "client_secret" + - "authorization_endpoint" + - "sourceType" + properties: + account_id: + type: "string" + order: 0 + description: + "The account ID for your Zoom account. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + client_id: + type: "string" + order: 1 + description: + "The client ID for your Zoom app. You can find this in the\ + \ Zoom Marketplace under the \"Manage\" tab for your app." + client_secret: + type: "string" + order: 2 + description: + "The client secret for your Zoom app. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + airbyte_secret: true + x-speakeasy-param-sensitive: true + authorization_endpoint: + type: "string" + order: 3 + default: "https://zoom.us/oauth/token" + sourceType: + title: "zoom" + const: "zoom" + enum: + - "zoom" + order: 0 + type: "string" + source-zoom-update: + title: "Zoom Spec" + type: "object" + required: + - "account_id" + - "client_id" + - "client_secret" + - "authorization_endpoint" + properties: + account_id: + type: "string" + order: 0 + description: + "The account ID for your Zoom account. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + client_id: + type: "string" + order: 1 + description: + "The client ID for your Zoom app. You can find this in the\ + \ Zoom Marketplace under the \"Manage\" tab for your app." + client_secret: + type: "string" + order: 2 + description: + "The client secret for your Zoom app. 
You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + airbyte_secret: true + authorization_endpoint: + type: "string" + order: 3 + default: "https://zoom.us/oauth/token" + source-delighted: + title: "Delighted Spec" + type: "object" + required: + - "since" + - "api_key" + - "sourceType" + properties: + api_key: + title: "Delighted API Key" + type: "string" + description: "A Delighted API key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + since: + title: "Replication Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + examples: + - "2022-05-30T04:50:23Z" + - "2022-05-30 04:50:23" + pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" + order: 1 + format: "date-time" + sourceType: + title: "delighted" + const: "delighted" + enum: + - "delighted" + order: 0 + type: "string" + source-delighted-update: + title: "Delighted Spec" + type: "object" + required: + - "since" + - "api_key" + properties: + api_key: + title: "Delighted API Key" + type: "string" + description: "A Delighted API key." 
+ airbyte_secret: true + order: 0 + since: + title: "Replication Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + examples: + - "2022-05-30T04:50:23Z" + - "2022-05-30 04:50:23" + pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" + order: 1 + format: "date-time" + source-klarna: + title: "Klarna Spec" + type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + - "sourceType" + properties: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "na" + - "oc" + description: + "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'na', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: + "Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: + "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: + "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "klarna" + const: "klarna" + enum: + - "klarna" + order: 0 + type: "string" + source-klarna-update: + title: "Klarna Spec" + type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + properties: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "na" + - "oc" + description: + "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'na', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: + 
"Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: + "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: + "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + source-typeform: + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Typeform account and then generate a personal\ + \ Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Typeform\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + format: "date-time" + form_ids: + title: "Form IDs to replicate" + description: + "When this parameter is set, the connector will replicate data\ + \ only from the input forms. Otherwise, all forms in your Typeform account\ + \ will be replicated. You can find form IDs in your form URLs. For example,\ + \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\ + \ u6nXL7. You can find form URLs on Share panel" + type: "array" + items: + type: "string" + uniqueItems: true + order: 3 + sourceType: + title: "typeform" + const: "typeform" + enum: + - "typeform" + order: 0 + type: "string" + source-typeform-update: + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." 
+ airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Typeform account and then generate a personal\ + \ Access Token." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Typeform\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + format: "date-time" + form_ids: + title: "Form IDs to replicate" + description: + "When this parameter is set, the connector will replicate data\ + \ only from the input forms. Otherwise, all forms in your Typeform account\ + \ will be replicated. You can find form IDs in your form URLs. For example,\ + \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\ + \ u6nXL7. 
You can find form URLs on Share panel" + type: "array" + items: + type: "string" + uniqueItems: true + order: 3 + source-dremio: + title: "Dremio Spec" + type: "object" + required: + - "api_key" + - "base_url" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API Key that is generated when you authenticate to Dremio\ + \ API" + airbyte_secret: true + x-speakeasy-param-sensitive: true + base_url: + type: "string" + description: "URL of your Dremio instance" + default: "https://app.dremio.cloud" + sourceType: + title: "dremio" + const: "dremio" + enum: + - "dremio" + order: 0 + type: "string" + source-dremio-update: + title: "Dremio Spec" + type: "object" + required: + - "api_key" + - "base_url" + properties: + api_key: + type: "string" + description: + "API Key that is generated when you authenticate to Dremio\ + \ API" + airbyte_secret: true + base_url: + type: "string" + description: "URL of your Dremio instance" + default: "https://app.dremio.cloud" + source-cimis: + type: "object" + required: + - "api_key" + - "targets_type" + - "targets" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + targets_type: + type: "string" + title: "Targets Type" + enum: + - "WSN station numbers" + - "California zip codes" + - "decimal-degree coordinates" + - "street addresses" + order: 1 + targets: + type: "array" + title: "Targets" + order: 2 + daily_data_items: + type: "array" + title: "Daily Data Items" + enum: + - "day-air-tmp-avg" + - "day-air-tmp-min" + - "day-dew-pnt" + - "day-eto" + - "day-asce-eto" + - "day-asce-etr" + - "day-precip" + - "day-rel-hum-avg" + - "day-rel-hum-max" + - "day-rel-hum-min" + - "day-soil-tmp-avg" + - "day-soil-tmp-max" + - "day-soil-tmp-min" + - "day-sol-rad-avg" + - "day-sol-rad-net" + - "day-vap-pres-max" + - "day-vap-pres-avg" + - "day-wind-ene" + - "day-wind-ese" + - 
"day-wind-nne" + - "day-wind-nnw" + - "day-wind-run" + - "day-wind-spd-avg" + - "day-wind-ssw" + - "day-wind-wnw" + - "day-wind-wsw" + order: 3 + hourly_data_items: + type: "array" + title: "Hourly Data Items" + enum: + - "hly-air-tmp" + - "hly-dew-pnt" + - "hly-eto" + - "hly-net-rad" + - "hly-asce-eto" + - "hly-asce-etr" + - "hly-precip" + - "hly-rel-hum" + - "hly-res-wind" + - "hly-soil-tmp" + - "hly-sol-rad" + - "hly-vap-pres" + - "hly-wind-dir" + - "hly-wind-spd" + order: 4 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + unit_of_measure: + type: "string" + title: "Unit of Measure" + enum: + - "E" + - "M" + order: 7 + sourceType: + title: "cimis" + const: "cimis" + enum: + - "cimis" + order: 0 + type: "string" + source-cimis-update: + type: "object" + required: + - "api_key" + - "targets_type" + - "targets" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 0 + targets_type: + type: "string" + title: "Targets Type" + enum: + - "WSN station numbers" + - "California zip codes" + - "decimal-degree coordinates" + - "street addresses" + order: 1 + targets: + type: "array" + title: "Targets" + order: 2 + daily_data_items: + type: "array" + title: "Daily Data Items" + enum: + - "day-air-tmp-avg" + - "day-air-tmp-min" + - "day-dew-pnt" + - "day-eto" + - "day-asce-eto" + - "day-asce-etr" + - "day-precip" + - "day-rel-hum-avg" + - "day-rel-hum-max" + - "day-rel-hum-min" + - "day-soil-tmp-avg" + - "day-soil-tmp-max" + - "day-soil-tmp-min" + - "day-sol-rad-avg" + - "day-sol-rad-net" + - "day-vap-pres-max" + - "day-vap-pres-avg" + - "day-wind-ene" + - "day-wind-ese" + - "day-wind-nne" + - "day-wind-nnw" + - "day-wind-run" + - 
"day-wind-spd-avg" + - "day-wind-ssw" + - "day-wind-wnw" + - "day-wind-wsw" + order: 3 + hourly_data_items: + type: "array" + title: "Hourly Data Items" + enum: + - "hly-air-tmp" + - "hly-dew-pnt" + - "hly-eto" + - "hly-net-rad" + - "hly-asce-eto" + - "hly-asce-etr" + - "hly-precip" + - "hly-rel-hum" + - "hly-res-wind" + - "hly-soil-tmp" + - "hly-sol-rad" + - "hly-vap-pres" + - "hly-wind-dir" + - "hly-wind-spd" + order: 4 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + unit_of_measure: + type: "string" + title: "Unit of Measure" + enum: + - "E" + - "M" + order: 7 + source-paypal-transaction: + type: "object" + required: + - "client_id" + - "client_secret" + - "start_date" + - "is_sandbox" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Paypal developer application." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "The Client Secret of your Paypal developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\ + \ present time." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 2 + is_sandbox: + title: "Sandbox" + description: "Determines whether to use the sandbox or production environment." 
+ type: "boolean" + default: false + dispute_start_date: + title: "Dispute Start Date Range" + description: + "Start Date parameter for the list dispute endpoint in ISO format.\ + \ This Start Date must be in range within 180 days before present time,\ + \ and requires ONLY 3 milliseconds (mandatory). If you don't use this option,\ + \ it defaults to a start date set 180 days in the past." + type: "string" + examples: + - "2021-06-11T23:59:59.000Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{3}Z$" + format: "date-time" + order: 3 + end_date: + title: "End Date" + description: + "End Date for data extraction in ISO format. This can help you select a specific range of time,\ + \ mainly for test purposes or data integrity tests. When this is not\ + \ used, now_utc() is used by the streams. This does not apply to Disputes\ + \ and Product streams." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 4 + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + time_window: + type: "integer" + title: "Number of days per request" + description: + "The number of days per request. Must be a number between 1\ + \ and 31." + default: 7 + minimum: 1 + maximum: 31 + sourceType: + title: "paypal-transaction" + const: "paypal-transaction" + enum: + - "paypal-transaction" + order: 0 + type: "string" + source-paypal-transaction-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "start_date" + - "is_sandbox" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Paypal developer application." 
+ airbyte_secret: true + order: 0 + client_secret: + type: "string" + title: "Client secret" + description: "The Client Secret of your Paypal developer application." + airbyte_secret: true + order: 1 + start_date: + title: "Start Date" + description: + "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\ + \ present time." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 2 + is_sandbox: + title: "Sandbox" + description: "Determines whether to use the sandbox or production environment." + type: "boolean" + default: false + dispute_start_date: + title: "Dispute Start Date Range" + description: + "Start Date parameter for the list dispute endpoint in ISO format.\ + \ This Start Date must be in range within 180 days before present time,\ + \ and requires ONLY 3 milliseconds (mandatory). If you don't use this option,\ + \ it defaults to a start date set 180 days in the past." + type: "string" + examples: + - "2021-06-11T23:59:59.000Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{3}Z$" + format: "date-time" + order: 3 + end_date: + title: "End Date" + description: + "End Date for data extraction in ISO format. This can help you select a specific range of time,\ + \ mainly for test purposes or data integrity tests. When this is not\ + \ used, now_utc() is used by the streams. This does not apply to Disputes\ + \ and Product streams." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 4 + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." 
+ airbyte_secret: true + time_window: + type: "integer" + title: "Number of days per request" + description: + "The number of days per request. Must be a number between 1\ + \ and 31." + default: 7 + minimum: 1 + maximum: 31 + source-lemlist: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "Lemlist API key." + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "lemlist" + const: "lemlist" + enum: + - "lemlist" + order: 0 + type: "string" + source-lemlist-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "Lemlist API key." + order: 0 + source-pexels-api: + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key from the pexels website" + airbyte_secret: true + description: + "API key is required to access the Pexels API. To get yours,\ + \ go to https://www.pexels.com/api/documentation and create an account for\ + \ free." + order: 0 + x-speakeasy-param-sensitive: true + color: + type: "string" + title: "Specific color for the search" + description: + "Optional, Desired photo color. Supported colors red, orange,\ + \ yellow, green, turquoise, blue, violet, pink, brown, black, gray, white\ + \ or any hexadecimal color code." + examples: + - "red" + - "orange" + order: 1 + locale: + type: "string" + title: "Specific locale for the search" + description: + "Optional, The locale of the search you are performing. The\ + \ current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE'\ + \ 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR'\ + \ 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR'\ + \ 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'." 
+ examples: + - "en-US" + - "pt-BR" + order: 2 + orientation: + type: "string" + title: "Specific orientation for the search" + description: + "Optional, Desired photo orientation. The current supported\ + \ orientations are landscape, portrait or square" + examples: + - "square" + - "landscape" + order: 3 + query: + type: "string" + title: "Specific query for the search" + description: + "Optional, the search query, Example Ocean, Tigers, Pears,\ + \ etc." + examples: + - "people" + - "oceans" + order: 4 + size: + type: "string" + title: "Specific size for the search" + description: + "Optional, Minimum photo size. The current supported sizes\ + \ are large(24MP), medium(12MP) or small(4MP)." + examples: + - "large" + - "small" + order: 5 + sourceType: + title: "pexels-api" + const: "pexels-api" + enum: + - "pexels-api" + order: 0 + type: "string" + source-pexels-api-update: + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + title: "API Key from the pexels website" + airbyte_secret: true + description: + "API key is required to access the Pexels API. To get yours,\ + \ go to https://www.pexels.com/api/documentation and create an account for\ + \ free." + order: 0 + color: + type: "string" + title: "Specific color for the search" + description: + "Optional, Desired photo color. Supported colors red, orange,\ + \ yellow, green, turquoise, blue, violet, pink, brown, black, gray, white\ + \ or any hexadecimal color code." + examples: + - "red" + - "orange" + order: 1 + locale: + type: "string" + title: "Specific locale for the search" + description: + "Optional, The locale of the search you are performing. The\ + \ current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE'\ + \ 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR'\ + \ 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR'\ + \ 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'." 
+ examples: + - "en-US" + - "pt-BR" + order: 2 + orientation: + type: "string" + title: "Specific orientation for the search" + description: + "Optional, Desired photo orientation. The current supported\ + \ orientations are landscape, portrait or square" + examples: + - "square" + - "landscape" + order: 3 + query: + type: "string" + title: "Specific query for the search" + description: + "Optional, the search query, Example Ocean, Tigers, Pears,\ + \ etc." + examples: + - "people" + - "oceans" + order: 4 + size: + type: "string" + title: "Specific size for the search" + description: + "Optional, Minimum photo size. The current supported sizes\ + \ are large(24MP), medium(12MP) or small(4MP)." + examples: + - "large" + - "small" + order: 5 + source-leadfeeder: + type: "object" + required: + - "api_token" + - "start_date" + - "sourceType" + properties: + api_token: + type: "string" + order: 0 + title: "Api Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "leadfeeder" + const: "leadfeeder" + enum: + - "leadfeeder" + order: 0 + type: "string" + source-leadfeeder-update: + type: "object" + required: + - "api_token" + - "start_date" + properties: + api_token: + type: "string" + order: 0 + title: "Api Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-glassfrog: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API key provided by Glassfrog" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "glassfrog" + const: "glassfrog" + enum: + - "glassfrog" + order: 0 + type: "string" + source-glassfrog-update: + type: 
"object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API key provided by Glassfrog" + order: 0 + source-appcues: + type: "object" + required: + - "username" + - "account_id" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + description: "Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)" + order: 2 + title: "Account ID" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "appcues" + const: "appcues" + enum: + - "appcues" + order: 0 + type: "string" + source-appcues-update: + type: "object" + required: + - "username" + - "account_id" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + account_id: + type: "string" + description: "Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)" + order: 2 + title: "Account ID" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-facebook-marketing: + title: "Source Facebook Marketing" + type: "object" + properties: + account_ids: + title: "Ad Account ID(s)" + description: + "The Facebook Ad account ID(s) to pull data from. The Ad account\ + \ ID number is in the account dropdown menu or in your browser's address\ + \ bar of your Meta Ads Manager. See the docs for more information." + order: 0 + pattern_descriptor: "The Ad Account ID must be a number." 
+ examples: + - "111111111111111" + minItems: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]+$" + uniqueItems: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’s Dashboard,\ + \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\ + \ ads_read, read_insights, business_management. Then click on \"Get\ + \ token\". See the docs for more information." + order: 1 + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + credentials: + title: "Authentication" + description: "Credentials for connecting to the Facebook Marketing API" + type: "object" + oneOf: + - title: "Authenticate via Facebook Marketing (Oauth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Facebook Marketing API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret for the Facebook Marketing API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. 
From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "access_token" + - "auth_type" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for all incremental\ + \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\ + \ will be replicated for usual streams and only last 2 years for insight\ + \ streams." + order: 2 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not setting\ + \ this option will result in always syncing the latest data." + order: 3 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + campaign_statuses: + title: "Campaign Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 4 + type: "array" + items: + title: "ValidCampaignStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + adset_statuses: + title: "AdSet Statuses" + description: + "Select the statuses you want to be loaded in the stream. 
If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 5 + type: "array" + items: + title: "ValidAdSetStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + ad_statuses: + title: "Ad Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 6 + type: "array" + items: + title: "ValidAdStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ADSET_PAUSED" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "DISAPPROVED" + - "IN_PROCESS" + - "PAUSED" + - "PENDING_BILLING_INFO" + - "PENDING_REVIEW" + - "PREAPPROVED" + - "WITH_ISSUES" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images from Ad Creative" + description: + "Set to active if you want to fetch the thumbnail_url and store\ + \ the result in thumbnail_data_url for each Ad Creative." + default: false + order: 7 + type: "boolean" + custom_insights: + title: "Custom Insights" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." + order: 8 + type: "array" + items: + title: "InsightConfig" + description: "Config for custom insights" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + level: + title: "Level" + description: "Chosen level for API" + default: "ad" + enum: + - "ad" + - "adset" + - "campaign" + - "account" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "account_currency" + - "account_id" + - "account_name" + - "action_values" + - "actions" + - "ad_click_actions" + - "ad_id" + - "ad_impression_actions" + - "ad_name" + - "adset_end" + - "adset_id" + - "adset_name" + - "age_targeting" + - "attribution_setting" + - "auction_bid" + - "auction_competitiveness" + - "auction_max_competitor_bid" + - "buying_type" + - "campaign_id" + - "campaign_name" + - "canvas_avg_view_percent" + - "canvas_avg_view_time" + - "catalog_segment_actions" + - "catalog_segment_value" + - "catalog_segment_value_mobile_purchase_roas" + - "catalog_segment_value_omni_purchase_roas" + - "catalog_segment_value_website_purchase_roas" + - "clicks" + - "conversion_rate_ranking" + - "conversion_values" + - "conversions" + - "converted_product_quantity" + - "converted_product_value" + - "cost_per_15_sec_video_view" + - "cost_per_2_sec_continuous_video_view" + - "cost_per_action_type" + - "cost_per_ad_click" + - "cost_per_conversion" + - "cost_per_dda_countby_convs" + - "cost_per_estimated_ad_recallers" + - "cost_per_inline_link_click" + - "cost_per_inline_post_engagement" + - "cost_per_one_thousand_ad_impression" + - "cost_per_outbound_click" + - "cost_per_thruplay" + - "cost_per_unique_action_type" + - "cost_per_unique_click" + - "cost_per_unique_conversion" + - "cost_per_unique_inline_link_click" + - "cost_per_unique_outbound_click" + - "cpc" + - "cpm" + - "cpp" + - "created_time" + - "creative_media_type" + - "ctr" + - "date_start" + - "date_stop" + - "dda_countby_convs" + - "dda_results" + - "engagement_rate_ranking" + - "estimated_ad_recall_rate" + - "estimated_ad_recall_rate_lower_bound" + - "estimated_ad_recall_rate_upper_bound" + - "estimated_ad_recallers" + - "estimated_ad_recallers_lower_bound" + - "estimated_ad_recallers_upper_bound" + - "frequency" + - "full_view_impressions" + - "full_view_reach" + - "gender_targeting" + - "impressions" + - "inline_link_click_ctr" + - "inline_link_clicks" + - "inline_post_engagement" + - 
"instagram_upcoming_event_reminders_set" + - "instant_experience_clicks_to_open" + - "instant_experience_clicks_to_start" + - "instant_experience_outbound_clicks" + - "interactive_component_tap" + - "labels" + - "location" + - "marketing_messages_cost_per_delivered" + - "marketing_messages_cost_per_link_btn_click" + - "marketing_messages_spend" + - "mobile_app_purchase_roas" + - "objective" + - "optimization_goal" + - "outbound_clicks" + - "outbound_clicks_ctr" + - "place_page_name" + - "purchase_roas" + - "qualifying_question_qualify_answer_rate" + - "quality_ranking" + - "reach" + - "social_spend" + - "spend" + - "total_postbacks" + - "total_postbacks_detailed" + - "total_postbacks_detailed_v4" + - "unique_actions" + - "unique_clicks" + - "unique_conversions" + - "unique_ctr" + - "unique_inline_link_click_ctr" + - "unique_inline_link_clicks" + - "unique_link_clicks_ctr" + - "unique_outbound_clicks" + - "unique_outbound_clicks_ctr" + - "unique_video_continuous_2_sec_watched_actions" + - "unique_video_view_15_sec" + - "updated_time" + - "video_15_sec_watched_actions" + - "video_30_sec_watched_actions" + - "video_avg_time_watched_actions" + - "video_continuous_2_sec_watched_actions" + - "video_p100_watched_actions" + - "video_p25_watched_actions" + - "video_p50_watched_actions" + - "video_p75_watched_actions" + - "video_p95_watched_actions" + - "video_play_actions" + - "video_play_curve_actions" + - "video_play_retention_0_to_15s_actions" + - "video_play_retention_20_to_60s_actions" + - "video_play_retention_graph_actions" + - "video_thruplay_watched_actions" + - "video_time_watched_actions" + - "website_ctr" + - "website_purchase_roas" + - "wish_bid" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + title: "ValidBreakdowns" + description: "An enumeration." 
+ enum: + - "ad_format_asset" + - "age" + - "app_id" + - "body_asset" + - "call_to_action_asset" + - "coarse_conversion_value" + - "country" + - "description_asset" + - "device_platform" + - "dma" + - "fidelity_type" + - "frequency_value" + - "gender" + - "hourly_stats_aggregated_by_advertiser_time_zone" + - "hourly_stats_aggregated_by_audience_time_zone" + - "hsid" + - "image_asset" + - "impression_device" + - "is_conversion_id_modeled" + - "landing_destination" + - "link_url_asset" + - "marketing_messages_btn_name" + - "mdsa_landing_destination" + - "media_asset_url" + - "media_creator" + - "media_destination_url" + - "media_format" + - "media_origin_url" + - "media_text_content" + - "mmm" + - "place_page_id" + - "platform_position" + - "postback_sequence_index" + - "product_id" + - "publisher_platform" + - "redownload" + - "region" + - "skan_campaign_id" + - "skan_conversion_id" + - "skan_version" + - "standard_event_content_type" + - "title_asset" + - "video_asset" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + title: "ValidActionBreakdowns" + description: "An enumeration." + enum: + - "action_canvas_component_name" + - "action_carousel_card_id" + - "action_carousel_card_name" + - "action_destination" + - "action_device" + - "action_reaction" + - "action_target_id" + - "action_type" + - "action_video_sound" + - "action_video_type" + - "standard_event_content_type" + action_report_time: + title: "Action Report Time" + description: + "Determines the report time of action stats. For example,\ + \ if a person saw the ad on Jan 1st but converted on Jan 2nd, when\ + \ you query the API with action_report_time=impression, you see\ + \ a conversion on Jan 1st. When you query the API with action_report_time=conversion,\ + \ you see a conversion on Jan 2nd." 
+ default: "mixed" + enum: + - "conversion" + - "impression" + - "mixed" + type: "string" + time_increment: + title: "Time Increment" + description: + "Time window in days by which to aggregate statistics.\ + \ The sync will be chunked into N day intervals, where N is the\ + \ number of days you specified. For example, if you set this value\ + \ to 7, then all statistics will be reported as 7-day aggregates\ + \ by starting from the start_date. If the start and end dates are\ + \ October 1st and October 30th, then the connector will output 5\ + \ records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days\ + \ only). The minimum allowed value for this field is 1, and the\ + \ maximum is 89." + default: 1 + maximum: 89 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not\ + \ setting this option will result in always syncing the latest data." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + insights_lookback_window: + title: "Custom Insights Lookback Window" + description: "The attribution window" + default: 28 + maximum: 28 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Custom Insights Job Timeout" + description: "The insights job timeout" + default: 60 + maximum: 60 + minimum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + page_size: + title: "Page Size of Requests" + description: + "Page size used when sending requests to Facebook API to specify\ + \ number of records per page when response has pagination. Most users\ + \ do not need to set this field unless they specifically need to tune\ + \ the connector to address specific issues or use cases." + default: 100 + order: 10 + exclusiveMinimum: 0 + type: "integer" + insights_lookback_window: + title: "Insights Lookback Window" + description: + "The attribution window. Facebook freezes insight data 28 days\ + \ after it was generated, which means that all data from the past 28 days\ + \ may have changed since we last emitted it, so you can retrieve refreshed\ + \ insights from the past by setting this parameter. If you set a custom\ + \ lookback window value in Facebook account, please provide the same value\ + \ here." + default: 28 + order: 11 + maximum: 28 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Insights Job Timeout" + description: + "Insights Job Timeout establishes the maximum amount of time\ + \ (in minutes) of waiting for the report job to complete. When timeout\ + \ is reached the job is considered failed and we are trying to request\ + \ smaller amount of data by breaking the job to few smaller ones. 
If you\ + \ definitely know that 60 minutes is not enough for your report to be\ + \ processed then you can decrease the timeout value, so we start breaking\ + \ job to smaller parts faster." + default: 60 + order: 12 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + sourceType: + title: "facebook-marketing" + const: "facebook-marketing" + enum: + - "facebook-marketing" + order: 0 + type: "string" + required: + - "account_ids" + - "credentials" + - "sourceType" + source-facebook-marketing-update: + title: "Source Facebook Marketing" + type: "object" + properties: + account_ids: + title: "Ad Account ID(s)" + description: + "The Facebook Ad account ID(s) to pull data from. The Ad account\ + \ ID number is in the account dropdown menu or in your browser's address\ + \ bar of your Meta Ads Manager. See the docs for more information." + order: 0 + pattern_descriptor: "The Ad Account ID must be a number." + examples: + - "111111111111111" + minItems: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]+$" + uniqueItems: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’s Dashboard,\ + \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\ + \ ads_read, read_insights, business_management. Then click on \"Get\ + \ token\". See the docs for more information." 
+ order: 1 + airbyte_secret: true + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Facebook Marketing API" + type: "object" + oneOf: + - title: "Authenticate via Facebook Marketing (Oauth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Facebook Marketing API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Facebook Marketing API" + airbyte_secret: true + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + required: + - "client_id" + - "client_secret" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + required: + - "access_token" + - "auth_type" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for all incremental\ + \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\ + \ will be replicated for usual streams and only last 2 years for insight\ + \ streams." 
+ order: 2 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not setting\ + \ this option will result in always syncing the latest data." + order: 3 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + campaign_statuses: + title: "Campaign Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 4 + type: "array" + items: + title: "ValidCampaignStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + adset_statuses: + title: "AdSet Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 5 + type: "array" + items: + title: "ValidAdSetStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + ad_statuses: + title: "Ad Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 6 + type: "array" + items: + title: "ValidAdStatuses" + description: "An enumeration." 
+ enum: + - "ACTIVE" + - "ADSET_PAUSED" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "DISAPPROVED" + - "IN_PROCESS" + - "PAUSED" + - "PENDING_BILLING_INFO" + - "PENDING_REVIEW" + - "PREAPPROVED" + - "WITH_ISSUES" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images from Ad Creative" + description: + "Set to active if you want to fetch the thumbnail_url and store\ + \ the result in thumbnail_data_url for each Ad Creative." + default: false + order: 7 + type: "boolean" + custom_insights: + title: "Custom Insights" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." + order: 8 + type: "array" + items: + title: "InsightConfig" + description: "Config for custom insights" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + level: + title: "Level" + description: "Chosen level for API" + default: "ad" + enum: + - "ad" + - "adset" + - "campaign" + - "account" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "account_currency" + - "account_id" + - "account_name" + - "action_values" + - "actions" + - "ad_click_actions" + - "ad_id" + - "ad_impression_actions" + - "ad_name" + - "adset_end" + - "adset_id" + - "adset_name" + - "age_targeting" + - "attribution_setting" + - "auction_bid" + - "auction_competitiveness" + - "auction_max_competitor_bid" + - "buying_type" + - "campaign_id" + - "campaign_name" + - "canvas_avg_view_percent" + - "canvas_avg_view_time" + - "catalog_segment_actions" + - "catalog_segment_value" + - "catalog_segment_value_mobile_purchase_roas" + - "catalog_segment_value_omni_purchase_roas" + - "catalog_segment_value_website_purchase_roas" + - "clicks" + - "conversion_rate_ranking" + - "conversion_values" + - "conversions" + - "converted_product_quantity" + - "converted_product_value" + - "cost_per_15_sec_video_view" + - "cost_per_2_sec_continuous_video_view" + - "cost_per_action_type" + - "cost_per_ad_click" + - "cost_per_conversion" + - "cost_per_dda_countby_convs" + - "cost_per_estimated_ad_recallers" + - "cost_per_inline_link_click" + - "cost_per_inline_post_engagement" + - "cost_per_one_thousand_ad_impression" + - "cost_per_outbound_click" + - "cost_per_thruplay" + - "cost_per_unique_action_type" + - "cost_per_unique_click" + - "cost_per_unique_conversion" + - "cost_per_unique_inline_link_click" + - "cost_per_unique_outbound_click" + - "cpc" + - "cpm" + - "cpp" + - "created_time" + - "creative_media_type" + - "ctr" + - "date_start" + - "date_stop" + - "dda_countby_convs" + - "dda_results" + - "engagement_rate_ranking" + - "estimated_ad_recall_rate" + - "estimated_ad_recall_rate_lower_bound" + - "estimated_ad_recall_rate_upper_bound" + - "estimated_ad_recallers" + - "estimated_ad_recallers_lower_bound" + - "estimated_ad_recallers_upper_bound" + - "frequency" + - "full_view_impressions" + - "full_view_reach" + - "gender_targeting" + - "impressions" + - "inline_link_click_ctr" + - "inline_link_clicks" + - "inline_post_engagement" + - 
"instagram_upcoming_event_reminders_set" + - "instant_experience_clicks_to_open" + - "instant_experience_clicks_to_start" + - "instant_experience_outbound_clicks" + - "interactive_component_tap" + - "labels" + - "location" + - "marketing_messages_cost_per_delivered" + - "marketing_messages_cost_per_link_btn_click" + - "marketing_messages_spend" + - "mobile_app_purchase_roas" + - "objective" + - "optimization_goal" + - "outbound_clicks" + - "outbound_clicks_ctr" + - "place_page_name" + - "purchase_roas" + - "qualifying_question_qualify_answer_rate" + - "quality_ranking" + - "reach" + - "social_spend" + - "spend" + - "total_postbacks" + - "total_postbacks_detailed" + - "total_postbacks_detailed_v4" + - "unique_actions" + - "unique_clicks" + - "unique_conversions" + - "unique_ctr" + - "unique_inline_link_click_ctr" + - "unique_inline_link_clicks" + - "unique_link_clicks_ctr" + - "unique_outbound_clicks" + - "unique_outbound_clicks_ctr" + - "unique_video_continuous_2_sec_watched_actions" + - "unique_video_view_15_sec" + - "updated_time" + - "video_15_sec_watched_actions" + - "video_30_sec_watched_actions" + - "video_avg_time_watched_actions" + - "video_continuous_2_sec_watched_actions" + - "video_p100_watched_actions" + - "video_p25_watched_actions" + - "video_p50_watched_actions" + - "video_p75_watched_actions" + - "video_p95_watched_actions" + - "video_play_actions" + - "video_play_curve_actions" + - "video_play_retention_0_to_15s_actions" + - "video_play_retention_20_to_60s_actions" + - "video_play_retention_graph_actions" + - "video_thruplay_watched_actions" + - "video_time_watched_actions" + - "website_ctr" + - "website_purchase_roas" + - "wish_bid" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + title: "ValidBreakdowns" + description: "An enumeration." 
+ enum: + - "ad_format_asset" + - "age" + - "app_id" + - "body_asset" + - "call_to_action_asset" + - "coarse_conversion_value" + - "country" + - "description_asset" + - "device_platform" + - "dma" + - "fidelity_type" + - "frequency_value" + - "gender" + - "hourly_stats_aggregated_by_advertiser_time_zone" + - "hourly_stats_aggregated_by_audience_time_zone" + - "hsid" + - "image_asset" + - "impression_device" + - "is_conversion_id_modeled" + - "landing_destination" + - "link_url_asset" + - "marketing_messages_btn_name" + - "mdsa_landing_destination" + - "media_asset_url" + - "media_creator" + - "media_destination_url" + - "media_format" + - "media_origin_url" + - "media_text_content" + - "mmm" + - "place_page_id" + - "platform_position" + - "postback_sequence_index" + - "product_id" + - "publisher_platform" + - "redownload" + - "region" + - "skan_campaign_id" + - "skan_conversion_id" + - "skan_version" + - "standard_event_content_type" + - "title_asset" + - "video_asset" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + title: "ValidActionBreakdowns" + description: "An enumeration." + enum: + - "action_canvas_component_name" + - "action_carousel_card_id" + - "action_carousel_card_name" + - "action_destination" + - "action_device" + - "action_reaction" + - "action_target_id" + - "action_type" + - "action_video_sound" + - "action_video_type" + - "standard_event_content_type" + action_report_time: + title: "Action Report Time" + description: + "Determines the report time of action stats. For example,\ + \ if a person saw the ad on Jan 1st but converted on Jan 2nd, when\ + \ you query the API with action_report_time=impression, you see\ + \ a conversion on Jan 1st. When you query the API with action_report_time=conversion,\ + \ you see a conversion on Jan 2nd." 
+ default: "mixed" + enum: + - "conversion" + - "impression" + - "mixed" + type: "string" + time_increment: + title: "Time Increment" + description: + "Time window in days by which to aggregate statistics.\ + \ The sync will be chunked into N day intervals, where N is the\ + \ number of days you specified. For example, if you set this value\ + \ to 7, then all statistics will be reported as 7-day aggregates\ + \ by starting from the start_date. If the start and end dates are\ + \ October 1st and October 30th, then the connector will output 5\ + \ records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days\ + \ only). The minimum allowed value for this field is 1, and the\ + \ maximum is 89." + default: 1 + maximum: 89 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not\ + \ setting this option will result in always syncing the latest data." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + insights_lookback_window: + title: "Custom Insights Lookback Window" + description: "The attribution window" + default: 28 + maximum: 28 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Custom Insights Job Timeout" + description: "The insights job timeout" + default: 60 + maximum: 60 + minimum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + page_size: + title: "Page Size of Requests" + description: + "Page size used when sending requests to Facebook API to specify\ + \ number of records per page when response has pagination. Most users\ + \ do not need to set this field unless they specifically need to tune\ + \ the connector to address specific issues or use cases." + default: 100 + order: 10 + exclusiveMinimum: 0 + type: "integer" + insights_lookback_window: + title: "Insights Lookback Window" + description: + "The attribution window. Facebook freezes insight data 28 days\ + \ after it was generated, which means that all data from the past 28 days\ + \ may have changed since we last emitted it, so you can retrieve refreshed\ + \ insights from the past by setting this parameter. If you set a custom\ + \ lookback window value in Facebook account, please provide the same value\ + \ here." + default: 28 + order: 11 + maximum: 28 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Insights Job Timeout" + description: + "Insights Job Timeout establishes the maximum amount of time\ + \ (in minutes) of waiting for the report job to complete. When timeout\ + \ is reached the job is considered failed and we are trying to request\ + \ smaller amount of data by breaking the job to few smaller ones. 
If you\ + \ definitely know that 60 minutes is not enough for your report to be\ + \ processed then you can decrease the timeout value, so we start breaking\ + \ job to smaller parts faster." + default: 60 + order: 12 + maximum: 60 + minimum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "account_ids" + - "credentials" + source-recruitee: + type: "object" + required: + - "api_key" + - "company_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recruitee API Key. See here." + order: 0 + x-speakeasy-param-sensitive: true + company_id: + type: "integer" + title: "Company ID" + description: + "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." + order: 1 + sourceType: + title: "recruitee" + const: "recruitee" + enum: + - "recruitee" + order: 0 + type: "string" + source-recruitee-update: + type: "object" + required: + - "api_key" + - "company_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recruitee API Key. See here." + order: 0 + company_id: + type: "integer" + title: "Company ID" + description: + "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." 
+ order: 1 + source-airbyte: + type: "object" + required: + - "start_date" + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 1 + title: "client_id" + start_date: + type: "string" + order: 0 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_secret: + type: "string" + order: 2 + title: "client_secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "airbyte" + const: "airbyte" + enum: + - "airbyte" + order: 0 + type: "string" + source-airbyte-update: + type: "object" + required: + - "start_date" + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 1 + title: "client_id" + start_date: + type: "string" + order: 0 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_secret: + type: "string" + order: 2 + title: "client_secret" + airbyte_secret: true + source-survey-sparrow: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Your access token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + region: + type: "object" + title: "Base URL" + description: + "Is your account location EU based? If yes, the base url\ + \ to retrieve data will be different." 
+ oneOf: + - type: "object" + title: "EU-based account" + properties: + url_base: + type: "string" + const: "https://eu-api.surveysparrow.com/v3" + enum: + - "https://eu-api.surveysparrow.com/v3" + - type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + default: + type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + order: 1 + survey_id: + type: "array" + description: "A List of your survey ids for survey-specific stream" + order: 2 + sourceType: + title: "survey-sparrow" + const: "survey-sparrow" + enum: + - "survey-sparrow" + order: 0 + type: "string" + source-survey-sparrow-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + description: + "Your access token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + region: + type: "object" + title: "Base URL" + description: + "Is your account location EU based? If yes, the base url\ + \ to retrieve data will be different." 
+ oneOf: + - type: "object" + title: "EU-based account" + properties: + url_base: + type: "string" + const: "https://eu-api.surveysparrow.com/v3" + enum: + - "https://eu-api.surveysparrow.com/v3" + - type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + default: + type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + order: 1 + survey_id: + type: "array" + description: "A List of your survey ids for survey-specific stream" + order: 2 + source-azure-table: + title: "Azure Data Table Spec" + type: "object" + required: + - "storage_account_name" + - "storage_access_key" + - "sourceType" + properties: + storage_account_name: + title: "Account Name" + type: "string" + description: "The name of your storage account." + order: 0 + airbyte_secret: false + x-speakeasy-param-sensitive: true + storage_access_key: + title: "Access Key" + type: "string" + description: + "Azure Table Storage Access Key. See the docs for more information on how to obtain this key." + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + storage_endpoint_suffix: + title: "Endpoint Suffix" + type: "string" + description: + "Azure Table Storage service account URL suffix. 
See the docs\ + \ for more information on how to obtain endpoint suffix" + order: 2 + default: "core.windows.net" + examples: + - "core.windows.net" + - "core.chinacloudapi.cn" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "azure-table" + const: "azure-table" + enum: + - "azure-table" + order: 0 + type: "string" + source-azure-table-update: + title: "Azure Data Table Spec" + type: "object" + required: + - "storage_account_name" + - "storage_access_key" + properties: + storage_account_name: + title: "Account Name" + type: "string" + description: "The name of your storage account." + order: 0 + airbyte_secret: false + storage_access_key: + title: "Access Key" + type: "string" + description: + "Azure Table Storage Access Key. See the docs for more information on how to obtain this key." + order: 1 + airbyte_secret: true + storage_endpoint_suffix: + title: "Endpoint Suffix" + type: "string" + description: + "Azure Table Storage service account URL suffix. 
See the docs\ + \ for more information on how to obtain endpoint suffix" + order: 2 + default: "core.windows.net" + examples: + - "core.windows.net" + - "core.chinacloudapi.cn" + airbyte_secret: false + source-customer-io: + type: "object" + required: + - "app_api_key" + - "sourceType" + properties: + app_api_key: + type: "string" + title: "Customer.io App API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "customer-io" + const: "customer-io" + enum: + - "customer-io" + order: 0 + type: "string" + source-customer-io-update: + type: "object" + required: + - "app_api_key" + properties: + app_api_key: + type: "string" + title: "Customer.io App API Key" + airbyte_secret: true + order: 0 + source-surveymonkey: + type: "object" + required: + - "start_date" + - "credentials" + - "sourceType" + properties: + origin: + type: "string" + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: + "Depending on the originating datacenter of the SurveyMonkey\ + \ account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: "object" + required: + - "auth_method" + - "access_token" + order: 2 + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." 
+ airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + order: 3 + type: "string" + airbyte_secret: true + description: + "Access Token for making authenticated requests. See the\ + \ docs for information on how to generate this key." + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + order: 3 + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: "array" + order: 1000 + items: + type: "string" + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all boards to which you have access will be\ + \ replicated." + sourceType: + title: "surveymonkey" + const: "surveymonkey" + enum: + - "surveymonkey" + order: 0 + type: "string" + source-surveymonkey-update: + type: "object" + required: + - "start_date" + - "credentials" + properties: + origin: + type: "string" + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: + "Depending on the originating datacenter of the SurveyMonkey\ + \ account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: "object" + required: + - "auth_method" + - "access_token" + order: 2 + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." 
+ airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + access_token: + title: "Access Token" + order: 3 + type: "string" + airbyte_secret: true + description: + "Access Token for making authenticated requests. See the\ + \ docs for information on how to generate this key." + start_date: + title: "Start Date" + order: 3 + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: "array" + order: 1000 + items: + type: "string" + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all boards to which you have access will be\ + \ replicated." + source-persistiq: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "PersistIq API Key. See the docs for more information on where to find that key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "persistiq" + const: "persistiq" + enum: + - "persistiq" + order: 0 + type: "string" + source-persistiq-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "PersistIq API Key. See the docs for more information on where to find that key." + airbyte_secret: true + order: 0 + source-configcat: + type: "object" + required: + - "username" + - "password" + - "sourceType" + properties: + username: + type: "string" + description: + "Basic auth user name. See here." + title: "Username" + order: 0 + password: + type: "string" + description: + "Basic auth password. See here." 
+ title: "Password" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "configcat" + const: "configcat" + enum: + - "configcat" + order: 0 + type: "string" + source-configcat-update: + type: "object" + required: + - "username" + - "password" + properties: + username: + type: "string" + description: + "Basic auth user name. See here." + title: "Username" + order: 0 + password: + type: "string" + description: + "Basic auth password. See here." + title: "Password" + airbyte_secret: true + order: 1 + source-reddit: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + description: "Specifies the query for searching in reddits and subreddits" + order: 1 + title: "Query" + default: "airbyte" + include_over_18: + type: "boolean" + description: "Includes mature content" + order: 2 + title: "Include over 18 flag" + default: false + exact: + type: "boolean" + description: "Specifies exact keyword and reduces distractions" + order: 3 + title: "Exact" + limit: + type: "number" + description: "Max records per page limit" + order: 4 + title: "Limit" + default: "1000" + subreddits: + type: "array" + description: "Subreddits for exploration" + order: 5 + title: "Subreddits" + default: + - "r/funny" + - "r/AskReddit" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "reddit" + const: "reddit" + enum: + - "reddit" + order: 0 + type: "string" + source-reddit-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + query: + type: "string" + description: "Specifies the query for searching in reddits and subreddits" + order: 1 + 
title: "Query" + default: "airbyte" + include_over_18: + type: "boolean" + description: "Includes mature content" + order: 2 + title: "Include over 18 flag" + default: false + exact: + type: "boolean" + description: "Specifies exact keyword and reduces distractions" + order: 3 + title: "Exact" + limit: + type: "number" + description: "Max records per page limit" + order: 4 + title: "Limit" + default: "1000" + subreddits: + type: "array" + description: "Subreddits for exploration" + order: 5 + title: "Subreddits" + default: + - "r/funny" + - "r/AskReddit" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-insightly: + type: "object" + required: + - "start_date" + - "token" + - "sourceType" + properties: + start_date: + type: + - "string" + - "null" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Insightly\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. Note that it will be used only for incremental streams." + examples: + - "2021-03-01T00:00:00Z" + order: 0 + token: + type: + - "string" + - "null" + title: "API Token" + description: "Your Insightly API token." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "insightly" + const: "insightly" + enum: + - "insightly" + order: 0 + type: "string" + source-insightly-update: + type: "object" + required: + - "start_date" + - "token" + properties: + start_date: + type: + - "string" + - "null" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Insightly\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. 
Note that it will be used only for incremental streams." + examples: + - "2021-03-01T00:00:00Z" + order: 0 + token: + type: + - "string" + - "null" + title: "API Token" + description: "Your Insightly API token." + airbyte_secret: true + order: 1 + source-cart: + title: "Cart.com Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Central API Router" + type: "object" + order: 0 + required: + - "auth_type" + - "user_name" + - "user_secret" + - "site_id" + properties: + auth_type: + type: "string" + const: "CENTRAL_API_ROUTER" + order: 0 + enum: + - "CENTRAL_API_ROUTER" + user_name: + type: "string" + title: "User Name" + description: "Enter your application's User Name" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + user_secret: + type: "string" + title: "User Secret" + description: "Enter your application's User Secret" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + site_id: + type: "string" + title: "Site ID" + description: + "You can determine a site provisioning site Id by hitting\ + \ https://site.com/store/sitemonitor.aspx and reading the response\ + \ param PSID" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "Single Store Access Token" + type: "object" + order: 1 + required: + - "auth_type" + - "access_token" + - "store_name" + properties: + auth_type: + type: "string" + const: "SINGLE_STORE_ACCESS_TOKEN" + order: 0 + enum: + - "SINGLE_STORE_ACCESS_TOKEN" + access_token: + type: "string" + title: "Access Token" + airbyte_secret: true + order: 1 + description: "Access Token for making authenticated requests." + x-speakeasy-param-sensitive: true + store_name: + type: "string" + title: "Store Name" + order: 2 + description: + "The name of Cart.com Online Store. 
All API URLs start\ + \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\ + \ is the domain name of your store." + start_date: + title: "Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + sourceType: + title: "cart" + const: "cart" + enum: + - "cart" + order: 0 + type: "string" + source-cart-update: + title: "Cart.com Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Central API Router" + type: "object" + order: 0 + required: + - "auth_type" + - "user_name" + - "user_secret" + - "site_id" + properties: + auth_type: + type: "string" + const: "CENTRAL_API_ROUTER" + order: 0 + enum: + - "CENTRAL_API_ROUTER" + user_name: + type: "string" + title: "User Name" + description: "Enter your application's User Name" + airbyte_secret: true + order: 1 + user_secret: + type: "string" + title: "User Secret" + description: "Enter your application's User Secret" + airbyte_secret: true + order: 2 + site_id: + type: "string" + title: "Site ID" + description: + "You can determine a site provisioning site Id by hitting\ + \ https://site.com/store/sitemonitor.aspx and reading the response\ + \ param PSID" + airbyte_secret: true + order: 3 + - title: "Single Store Access Token" + type: "object" + order: 1 + required: + - "auth_type" + - "access_token" + - "store_name" + properties: + auth_type: + type: "string" + const: "SINGLE_STORE_ACCESS_TOKEN" + order: 0 + enum: + - "SINGLE_STORE_ACCESS_TOKEN" + access_token: + type: "string" + title: "Access Token" + airbyte_secret: true + order: 1 + description: "Access Token for making authenticated requests." + store_name: + type: "string" + title: "Store Name" + order: 2 + description: + "The name of Cart.com Online Store. 
All API URLs start\ + \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\ + \ is the domain name of your store." + start_date: + title: "Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + source-oracle: + title: "Oracle Source Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sourceType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 1 + port: + title: "Port" + description: + "Port of the database.\nOracle Corporations recommends the\ + \ following port numbers:\n1521 - Default listening port for client connections\ + \ to the listener. \n2484 - Recommended and officially registered listening\ + \ port for client connections to the listener using TCP/IP with SSL" + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + order: 2 + connection_data: + title: "Connect by" + type: "object" + description: "Connect data that will be used for DB connection" + order: 3 + oneOf: + - title: "Service name" + description: "Use service name" + required: + - "service_name" + properties: + connection_type: + type: "string" + const: "service_name" + order: 0 + enum: + - "service_name" + service_name: + title: "Service name" + type: "string" + order: 1 + - title: "System ID (SID)" + description: "Use SID (Oracle System Identifier)" + required: + - "sid" + properties: + connection_type: + type: "string" + const: "sid" + order: 0 + enum: + - "sid" + sid: + title: "System ID (SID)" + type: "string" + order: 1 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 1 + uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method with is used when communicating with\ + \ the database." + order: 8 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines what encryption algorithm is\ + \ used." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." 
+ required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM File" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "oracle" + const: "oracle" + enum: + - "oracle" + order: 0 + type: "string" + source-oracle-update: + title: "Oracle Source Spec" + type: "object" + required: + - "host" + - "port" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 1 + port: + title: "Port" + description: + "Port of the database.\nOracle Corporations recommends the\ + \ following port numbers:\n1521 - Default listening port for client connections\ + \ to the listener. 
\n2484 - Recommended and officially registered listening\ + \ port for client connections to the listener using TCP/IP with SSL" + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + order: 2 + connection_data: + title: "Connect by" + type: "object" + description: "Connect data that will be used for DB connection" + order: 3 + oneOf: + - title: "Service name" + description: "Use service name" + required: + - "service_name" + properties: + connection_type: + type: "string" + const: "service_name" + order: 0 + enum: + - "service_name" + service_name: + title: "Service name" + type: "string" + order: 1 + - title: "System ID (SID)" + description: "Use SID (Oracle System Identifier)" + required: + - "sid" + properties: + connection_type: + type: "string" + const: "sid" + order: 0 + enum: + - "sid" + sid: + title: "System ID (SID)" + type: "string" + order: 1 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 1 + uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method with is used when communicating with\ + \ the database." + order: 8 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines what encryption algorithm is\ + \ used." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM File" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + order: 4 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-appfollow: + type: "object" + required: + - "sourceType" + properties: + api_secret: + type: "string" + description: "API Key provided by Appfollow" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "appfollow" + const: "appfollow" + enum: + - "appfollow" + order: 0 + type: "string" + source-appfollow-update: + type: "object" + required: [] + properties: + api_secret: + type: "string" + description: "API Key provided by Appfollow" + title: "API Key" + airbyte_secret: true + order: 0 + source-chartmogul: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your Chartmogul API key. See the docs for info on how to obtain this." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. When\ + \ feasible, any data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + order: 1 + format: "date-time" + sourceType: + title: "chartmogul" + const: "chartmogul" + enum: + - "chartmogul" + order: 0 + type: "string" + source-chartmogul-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your Chartmogul API key. See the docs for info on how to obtain this." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. When\ + \ feasible, any data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + order: 1 + format: "date-time" + source-coinmarketcap: + type: "object" + required: + - "api_key" + - "data_type" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Your API Key. See here. The token is case sensitive." + order: 0 + x-speakeasy-param-sensitive: true + data_type: + type: "string" + title: "Data type" + enum: + - "latest" + - "historical" + description: + "/latest: Latest market ticker quotes and averages for cryptocurrencies\ + \ and exchanges. /historical: Intervals of historic market data like OHLCV\ + \ data or data for use in charting libraries. See here." + order: 1 + symbols: + type: "array" + title: "Symbol" + items: + type: "string" + description: "Cryptocurrency symbols. (only used for quotes stream)" + minItems: 1 + examples: + - "AVAX" + - "BTC" + order: 2 + sourceType: + title: "coinmarketcap" + const: "coinmarketcap" + enum: + - "coinmarketcap" + order: 0 + type: "string" + source-coinmarketcap-update: + type: "object" + required: + - "api_key" + - "data_type" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Your API Key. See here. The token is case sensitive." 
+ order: 0 + data_type: + type: "string" + title: "Data type" + enum: + - "latest" + - "historical" + description: + "/latest: Latest market ticker quotes and averages for cryptocurrencies\ + \ and exchanges. /historical: Intervals of historic market data like OHLCV\ + \ data or data for use in charting libraries. See here." + order: 1 + symbols: + type: "array" + title: "Symbol" + items: + type: "string" + description: "Cryptocurrency symbols. (only used for quotes stream)" + minItems: 1 + examples: + - "AVAX" + - "BTC" + order: 2 + source-dixa: + type: "object" + required: + - "api_token" + - "start_date" + - "sourceType" + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The connector pulls records updated from this date onwards." + examples: + - "YYYY-MM-DD" + order: 3 + sourceType: + title: "dixa" + const: "dixa" + enum: + - "dixa" + order: 0 + type: "string" + source-dixa-update: + type: "object" + required: + - "api_token" + - "start_date" + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + order: 1 + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The connector pulls records updated from this date onwards." 
+ examples: + - "YYYY-MM-DD" + order: 3 + source-freshcaller: + title: "Freshcaller Spec" + type: "object" + required: + - "domain" + - "api_key" + - "sourceType" + properties: + domain: + type: "string" + title: "Domain for Freshcaller account" + description: "Used to construct Base URL for the Freshcaller APIs" + examples: + - "snaptravel" + api_key: + type: "string" + title: "API Key" + description: + "Freshcaller API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + description: + "UTC date and time. Any data created after this date will be\ + \ replicated." + format: "date-time" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-01-01T12:00:00Z" + sync_lag_minutes: + title: "Lag in minutes for each sync" + type: "integer" + description: + "Lag in minutes for each sync, i.e., at time T, data for the\ + \ time range [prev_sync_time, T-30] will be fetched" + sourceType: + title: "freshcaller" + const: "freshcaller" + enum: + - "freshcaller" + order: 0 + type: "string" + source-freshcaller-update: + title: "Freshcaller Spec" + type: "object" + required: + - "domain" + - "api_key" + properties: + domain: + type: "string" + title: "Domain for Freshcaller account" + description: "Used to construct Base URL for the Freshcaller APIs" + examples: + - "snaptravel" + api_key: + type: "string" + title: "API Key" + description: + "Freshcaller API Key. See the docs for more information on how to obtain this key." 
+ airbyte_secret: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + description: + "UTC date and time. Any data created after this date will be\ + \ replicated." + format: "date-time" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-01-01T12:00:00Z" + sync_lag_minutes: + title: "Lag in minutes for each sync" + type: "integer" + description: + "Lag in minutes for each sync, i.e., at time T, data for the\ + \ time range [prev_sync_time, T-30] will be fetched" + source-recharge: + title: "Recharge Spec" + type: "object" + required: + - "start_date" + - "access_token" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Recharge\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will\ + \ not be replicated." + examples: + - "2021-05-14T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the Access Token generated. See the docs for\ + \ more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + use_orders_deprecated_api: + type: "boolean" + title: "Use `Orders` Deprecated API" + description: + "Define whether or not the `Orders` stream should use the deprecated\ + \ `2021-01` API version, or use `2021-11`, otherwise." 
+ default: true + sourceType: + title: "recharge" + const: "recharge" + enum: + - "recharge" + order: 0 + type: "string" + source-recharge-update: + title: "Recharge Spec" + type: "object" + required: + - "start_date" + - "access_token" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Recharge\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will\ + \ not be replicated." + examples: + - "2021-05-14T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the Access Token generated. See the docs for\ + \ more information." + airbyte_secret: true + use_orders_deprecated_api: + type: "boolean" + title: "Use `Orders` Deprecated API" + description: + "Define whether or not the `Orders` stream should use the deprecated\ + \ `2021-01` API version, or use `2021-11`, otherwise." 
+ default: true + source-aha: + type: "object" + required: + - "api_key" + - "url" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Bearer Token" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + order: 1 + sourceType: + title: "aha" + const: "aha" + enum: + - "aha" + order: 0 + type: "string" + source-aha-update: + type: "object" + required: + - "api_key" + - "url" + properties: + api_key: + type: "string" + title: "API Bearer Token" + airbyte_secret: true + description: "API Key" + order: 0 + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + order: 1 + source-brevo: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "brevo" + const: "brevo" + enum: + - "brevo" + order: 0 + type: "string" + source-brevo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-datascope: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "Authorization" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} 
[0-9]{2}:[0-9]{2}$" + order: 1 + sourceType: + title: "datascope" + const: "datascope" + enum: + - "datascope" + order: 0 + type: "string" + source-datascope-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "Authorization" + airbyte_secret: true + description: "API Key" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}$" + order: 1 + source-metabase: + type: "object" + required: + - "instance_api_url" + - "username" + - "sourceType" + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "https://localhost:3000/api/" + pattern: "^https://" + order: 0 + username: + type: "string" + title: "Username" + order: 1 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + session_token: + type: "string" + description: + "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." 
+ airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + sourceType: + title: "metabase" + const: "metabase" + enum: + - "metabase" + order: 0 + type: "string" + source-metabase-update: + type: "object" + required: + - "instance_api_url" + - "username" + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "https://localhost:3000/api/" + pattern: "^https://" + order: 0 + username: + type: "string" + title: "Username" + order: 1 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 2 + session_token: + type: "string" + description: + "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." + airbyte_secret: true + order: 3 + source-bing-ads: + title: "Bing Ads Spec" + type: "object" + required: + - "developer_token" + - "client_id" + - "refresh_token" + - "sourceType" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + tenant_id: + type: "string" + title: "Tenant ID" + description: + "The Tenant ID of your Microsoft Advertising developer application.\ + \ Set this to \"common\" unless you know you need a different value." + airbyte_secret: true + default: "common" + order: 0 + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + developer_token: + type: "string" + title: "Developer Token" + description: + "Developer token associated with user. See more info in the docs." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + account_names: + title: "Account Names Predicates" + description: "Predicates that will be used to sync data by specific accounts." + type: "array" + order: 5 + items: + description: "Account Names Predicates Config." + type: "object" + properties: + operator: + title: "Operator" + description: + "An Operator that will be used to filter accounts. The\ + \ Contains predicate has features for matching words, matching inflectional\ + \ forms of words, searching using wildcard characters, and searching\ + \ using proximity. The Equals is used to return all rows where account\ + \ name is equal(=) to the string that you provided" + type: "string" + enum: + - "Contains" + - "Equals" + name: + title: "Account Name" + description: + "Account Name is a string value for comparing with the\ + \ specified predicate." + type: "string" + required: + - "operator" + - "name" + reports_start_date: + type: "string" + title: "Reports replication start date" + format: "date" + description: + "The start date from which to begin replicating report data.\ + \ Any data generated before this date will not be replicated in reports.\ + \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\ + \ and current calendar year will be replicated." 
+ order: 6 + lookback_window: + title: "Lookback window" + description: + "Also known as attribution or conversion window. How far into\ + \ the past to look for records (in days). If your conversion window has\ + \ an hours/minutes granularity, round it up to the number of days exceeding.\ + \ Used only for performance report streams in incremental mode without\ + \ specified Reports Start Date." + type: "integer" + default: 0 + minimum: 0 + maximum: 90 + order: 7 + custom_reports: + title: "Custom Reports" + description: "You can add your Custom Bing Ads report by creating one." + order: 8 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Report Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + examples: + - "Account Performance" + - "AdDynamicTextPerformanceReport" + - "custom report" + reporting_object: + title: "Reporting Data Object" + description: + "The name of the the object derives from the ReportRequest\ + \ object. You can find it in Bing Ads Api docs - Reporting API -\ + \ Reporting Data Objects." 
+ type: "string" + enum: + - "AccountPerformanceReportRequest" + - "AdDynamicTextPerformanceReportRequest" + - "AdExtensionByAdReportRequest" + - "AdExtensionByKeywordReportRequest" + - "AdExtensionDetailReportRequest" + - "AdGroupPerformanceReportRequest" + - "AdPerformanceReportRequest" + - "AgeGenderAudienceReportRequest" + - "AudiencePerformanceReportRequest" + - "CallDetailReportRequest" + - "CampaignPerformanceReportRequest" + - "ConversionPerformanceReportRequest" + - "DestinationUrlPerformanceReportRequest" + - "DSAAutoTargetPerformanceReportRequest" + - "DSACategoryPerformanceReportRequest" + - "DSASearchQueryPerformanceReportRequest" + - "GeographicPerformanceReportRequest" + - "GoalsAndFunnelsReportRequest" + - "HotelDimensionPerformanceReportRequest" + - "HotelGroupPerformanceReportRequest" + - "KeywordPerformanceReportRequest" + - "NegativeKeywordConflictReportRequest" + - "ProductDimensionPerformanceReportRequest" + - "ProductMatchCountReportRequest" + - "ProductNegativeKeywordConflictReportRequest" + - "ProductPartitionPerformanceReportRequest" + - "ProductPartitionUnitPerformanceReportRequest" + - "ProductSearchQueryPerformanceReportRequest" + - "ProfessionalDemographicsAudienceReportRequest" + - "PublisherUsagePerformanceReportRequest" + - "SearchCampaignChangeHistoryReportRequest" + - "SearchQueryPerformanceReportRequest" + - "ShareOfVoiceReportRequest" + - "UserLocationPerformanceReportRequest" + report_columns: + title: "Columns" + description: + "A list of available report object columns. You can find\ + \ it in description of reporting object that you want to add to\ + \ custom report." + type: "array" + items: + description: "Name of report column." + type: "string" + minItems: 1 + report_aggregation: + title: "Aggregation" + description: "A list of available aggregations." + type: "string" + items: + title: "ValidEnums" + description: "An enumeration of aggregations." 
+ enum: + - "Hourly" + - "Daily" + - "Weekly" + - "Monthly" + - "DayOfWeek" + - "HourOfDay" + - "WeeklyStartingMonday" + - "Summary" + default: + - "Hourly" + required: + - "name" + - "reporting_object" + - "report_columns" + - "report_aggregation" + sourceType: + title: "bing-ads" + const: "bing-ads" + enum: + - "bing-ads" + order: 0 + type: "string" + source-bing-ads-update: + title: "Bing Ads Spec" + type: "object" + required: + - "developer_token" + - "client_id" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + tenant_id: + type: "string" + title: "Tenant ID" + description: + "The Tenant ID of your Microsoft Advertising developer application.\ + \ Set this to \"common\" unless you know you need a different value." + airbyte_secret: true + default: "common" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 3 + developer_token: + type: "string" + title: "Developer Token" + description: + "Developer token associated with user. See more info in the docs." + airbyte_secret: true + order: 4 + account_names: + title: "Account Names Predicates" + description: "Predicates that will be used to sync data by specific accounts." + type: "array" + order: 5 + items: + description: "Account Names Predicates Config." + type: "object" + properties: + operator: + title: "Operator" + description: + "An Operator that will be used to filter accounts. 
The\ + \ Contains predicate has features for matching words, matching inflectional\ + \ forms of words, searching using wildcard characters, and searching\ + \ using proximity. The Equals is used to return all rows where account\ + \ name is equal(=) to the string that you provided" + type: "string" + enum: + - "Contains" + - "Equals" + name: + title: "Account Name" + description: + "Account Name is a string value for comparing with the\ + \ specified predicate." + type: "string" + required: + - "operator" + - "name" + reports_start_date: + type: "string" + title: "Reports replication start date" + format: "date" + description: + "The start date from which to begin replicating report data.\ + \ Any data generated before this date will not be replicated in reports.\ + \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\ + \ and current calendar year will be replicated." + order: 6 + lookback_window: + title: "Lookback window" + description: + "Also known as attribution or conversion window. How far into\ + \ the past to look for records (in days). If your conversion window has\ + \ an hours/minutes granularity, round it up to the number of days exceeding.\ + \ Used only for performance report streams in incremental mode without\ + \ specified Reports Start Date." + type: "integer" + default: 0 + minimum: 0 + maximum: 90 + order: 7 + custom_reports: + title: "Custom Reports" + description: "You can add your Custom Bing Ads report by creating one." + order: 8 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Report Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + examples: + - "Account Performance" + - "AdDynamicTextPerformanceReport" + - "custom report" + reporting_object: + title: "Reporting Data Object" + description: + "The name of the the object derives from the ReportRequest\ + \ object. 
You can find it in Bing Ads Api docs - Reporting API -\ + \ Reporting Data Objects." + type: "string" + enum: + - "AccountPerformanceReportRequest" + - "AdDynamicTextPerformanceReportRequest" + - "AdExtensionByAdReportRequest" + - "AdExtensionByKeywordReportRequest" + - "AdExtensionDetailReportRequest" + - "AdGroupPerformanceReportRequest" + - "AdPerformanceReportRequest" + - "AgeGenderAudienceReportRequest" + - "AudiencePerformanceReportRequest" + - "CallDetailReportRequest" + - "CampaignPerformanceReportRequest" + - "ConversionPerformanceReportRequest" + - "DestinationUrlPerformanceReportRequest" + - "DSAAutoTargetPerformanceReportRequest" + - "DSACategoryPerformanceReportRequest" + - "DSASearchQueryPerformanceReportRequest" + - "GeographicPerformanceReportRequest" + - "GoalsAndFunnelsReportRequest" + - "HotelDimensionPerformanceReportRequest" + - "HotelGroupPerformanceReportRequest" + - "KeywordPerformanceReportRequest" + - "NegativeKeywordConflictReportRequest" + - "ProductDimensionPerformanceReportRequest" + - "ProductMatchCountReportRequest" + - "ProductNegativeKeywordConflictReportRequest" + - "ProductPartitionPerformanceReportRequest" + - "ProductPartitionUnitPerformanceReportRequest" + - "ProductSearchQueryPerformanceReportRequest" + - "ProfessionalDemographicsAudienceReportRequest" + - "PublisherUsagePerformanceReportRequest" + - "SearchCampaignChangeHistoryReportRequest" + - "SearchQueryPerformanceReportRequest" + - "ShareOfVoiceReportRequest" + - "UserLocationPerformanceReportRequest" + report_columns: + title: "Columns" + description: + "A list of available report object columns. You can find\ + \ it in description of reporting object that you want to add to\ + \ custom report." + type: "array" + items: + description: "Name of report column." + type: "string" + minItems: 1 + report_aggregation: + title: "Aggregation" + description: "A list of available aggregations." 
+ type: "string" + items: + title: "ValidEnums" + description: "An enumeration of aggregations." + enum: + - "Hourly" + - "Daily" + - "Weekly" + - "Monthly" + - "DayOfWeek" + - "HourOfDay" + - "WeeklyStartingMonday" + - "Summary" + default: + - "Hourly" + required: + - "name" + - "reporting_object" + - "report_columns" + - "report_aggregation" + source-monday: + title: "Monday Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + subdomain: + type: "string" + title: "Subdomain/Slug" + description: + "Slug/subdomain of the account, or the first part of\ + \ the URL that comes before .monday.com" + default: "" + order: 0 + auth_type: + type: "string" + const: "oauth2.0" + order: 1 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: "API Token for making authenticated requests." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "monday" + const: "monday" + enum: + - "monday" + order: 0 + type: "string" + source-monday-update: + title: "Monday Spec" + type: "object" + required: [] + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + subdomain: + type: "string" + title: "Subdomain/Slug" + description: + "Slug/subdomain of the account, or the first part of\ + \ the URL that comes before .monday.com" + default: "" + order: 0 + auth_type: + type: "string" + const: "oauth2.0" + order: 1 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: "API Token for making authenticated requests." 
+ airbyte_secret: true + source-algolia: + type: "object" + required: + - "api_key" + - "application_id" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + application_id: + type: "string" + description: "The application ID for your application found in settings" + order: 1 + title: "Application ID" + search_query: + type: "string" + description: + "Search query to be used with indexes_query stream with format\ + \ defined in `https://www.algolia.com/doc/rest-api/search/#tag/Search/operation/searchSingleIndex`" + order: 2 + title: "Indexes Search query" + default: "hitsPerPage=2&getRankingInfo=1" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + object_id: + type: "string" + description: "Object ID within index for search queries" + order: 4 + title: "Object ID" + default: "ecommerce-sample-data-9999996" + sourceType: + title: "algolia" + const: "algolia" + enum: + - "algolia" + order: 0 + type: "string" + source-algolia-update: + type: "object" + required: + - "api_key" + - "application_id" + - "start_date" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + application_id: + type: "string" + description: "The application ID for your application found in settings" + order: 1 + title: "Application ID" + search_query: + type: "string" + description: + "Search query to be used with indexes_query stream with format\ + \ defined in `https://www.algolia.com/doc/rest-api/search/#tag/Search/operation/searchSingleIndex`" + order: 2 + title: "Indexes Search query" + default: "hitsPerPage=2&getRankingInfo=1" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + object_id: + 
type: "string" + description: "Object ID within index for search queries" + order: 4 + title: "Object ID" + default: "ecommerce-sample-data-9999996" + source-amplitude: + title: "Amplitude Spec" + type: "object" + required: + - "api_key" + - "secret_key" + - "start_date" + - "sourceType" + properties: + data_region: + type: "string" + title: "Data region" + description: "Amplitude data region server" + enum: + - "Standard Server" + - "EU Residency Server" + default: "Standard Server" + api_key: + type: "string" + title: "API Key" + description: + "Amplitude API Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + secret_key: + type: "string" + title: "Secret Key" + description: + "Amplitude Secret Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + format: "date-time" + title: "Replication Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2021-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-01-25T00:00:00Z" + request_time_range: + type: "integer" + title: "Request time range" + description: + "According to Considerations too big time range in request can cause a timeout\ + \ error. In this case, set shorter time interval in hours." 
+ default: 24 + minimum: 1 + maximum: 8760 + sourceType: + title: "amplitude" + const: "amplitude" + enum: + - "amplitude" + order: 0 + type: "string" + source-amplitude-update: + title: "Amplitude Spec" + type: "object" + required: + - "api_key" + - "secret_key" + - "start_date" + properties: + data_region: + type: "string" + title: "Data region" + description: "Amplitude data region server" + enum: + - "Standard Server" + - "EU Residency Server" + default: "Standard Server" + api_key: + type: "string" + title: "API Key" + description: + "Amplitude API Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + secret_key: + type: "string" + title: "Secret Key" + description: + "Amplitude Secret Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + start_date: + type: "string" + format: "date-time" + title: "Replication Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2021-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-01-25T00:00:00Z" + request_time_range: + type: "integer" + title: "Request time range" + description: + "According to Considerations too big time range in request can cause a timeout\ + \ error. In this case, set shorter time interval in hours." + default: 24 + minimum: 1 + maximum: 8760 + source-google-pagespeed-insights: + type: "object" + required: + - "categories" + - "strategies" + - "urls" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited\ + \ when using without API Key. Creating and using the API key therefore\ + \ is recommended. The key is case sensitive." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + categories: + type: "array" + items: + type: "string" + enum: + - "accessibility" + - "best-practices" + - "performance" + - "pwa" + - "seo" + title: "Lighthouse Categories" + description: + "Defines which Lighthouse category to run. One or many of:\ + \ \"accessibility\", \"best-practices\", \"performance\", \"pwa\", \"\ + seo\"." + order: 1 + strategies: + type: "array" + items: + type: "string" + enum: + - "desktop" + - "mobile" + title: "Analyses Strategies" + description: + "The analyses strategy to use. Either \"desktop\" or \"mobile\"\ + ." + order: 2 + urls: + type: "array" + items: + type: "string" + pattern: + "^(?:origin:)?(http(s)?:\\/\\/)[\\w.-]+(?:\\.[\\w\\.-]+)+[\\\ + w\\-\\._~:\\/?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$" + title: "URLs to analyse" + description: + "The URLs to retrieve pagespeed information from. The connector\ + \ will attempt to sync PageSpeed reports for all the defined URLs. Format:\ + \ https://(www.)url.domain" + example: "https://example.com" + order: 3 + sourceType: + title: "google-pagespeed-insights" + const: "google-pagespeed-insights" + enum: + - "google-pagespeed-insights" + order: 0 + type: "string" + source-google-pagespeed-insights-update: + type: "object" + required: + - "categories" + - "strategies" + - "urls" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited\ + \ when using without API Key. Creating and using the API key therefore\ + \ is recommended. The key is case sensitive." + airbyte_secret: true + order: 0 + categories: + type: "array" + items: + type: "string" + enum: + - "accessibility" + - "best-practices" + - "performance" + - "pwa" + - "seo" + title: "Lighthouse Categories" + description: + "Defines which Lighthouse category to run. 
One or many of:\ + \ \"accessibility\", \"best-practices\", \"performance\", \"pwa\", \"\ + seo\"." + order: 1 + strategies: + type: "array" + items: + type: "string" + enum: + - "desktop" + - "mobile" + title: "Analyses Strategies" + description: + "The analyses strategy to use. Either \"desktop\" or \"mobile\"\ + ." + order: 2 + urls: + type: "array" + items: + type: "string" + pattern: + "^(?:origin:)?(http(s)?:\\/\\/)[\\w.-]+(?:\\.[\\w\\.-]+)+[\\\ + w\\-\\._~:\\/?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$" + title: "URLs to analyse" + description: + "The URLs to retrieve pagespeed information from. The connector\ + \ will attempt to sync PageSpeed reports for all the defined URLs. Format:\ + \ https://(www.)url.domain" + example: "https://example.com" + order: 3 + source-savvycal: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Go to SavvyCal → Settings → Developer → Personal Tokens and\ + \ make a new token. Then, copy the private key. https://savvycal.com/developers" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "savvycal" + const: "savvycal" + enum: + - "savvycal" + order: 0 + type: "string" + source-savvycal-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Go to SavvyCal → Settings → Developer → Personal Tokens and\ + \ make a new token. Then, copy the private key. 
https://savvycal.com/developers" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-shortcut: + type: "object" + required: + - "api_key_2" + - "start_date" + - "sourceType" + properties: + api_key_2: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + query: + type: "string" + description: "Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`" + title: "Query" + default: "title:Our first Epic" + order: 2 + sourceType: + title: "shortcut" + const: "shortcut" + enum: + - "shortcut" + order: 0 + type: "string" + source-shortcut-update: + type: "object" + required: + - "api_key_2" + - "start_date" + properties: + api_key_2: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + query: + type: "string" + description: "Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`" + title: "Query" + default: "title:Our first Epic" + order: 2 + source-pipedrive: + title: "Pipedrive Spec" + type: "object" + required: + - "replication_start_date" + - "api_token" + - "sourceType" + properties: + api_token: + title: "API Token" + type: "string" + description: "The Pipedrive API Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
When specified and not\ + \ None, then stream will behave as incremental" + examples: + - "2017-01-25 00:00:00Z" + type: "string" + sourceType: + title: "pipedrive" + const: "pipedrive" + enum: + - "pipedrive" + order: 0 + type: "string" + source-pipedrive-update: + title: "Pipedrive Spec" + type: "object" + required: + - "replication_start_date" + - "api_token" + properties: + api_token: + title: "API Token" + type: "string" + description: "The Pipedrive API Token." + airbyte_secret: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. When specified and not\ + \ None, then stream will behave as incremental" + examples: + - "2017-01-25 00:00:00Z" + type: "string" + source-amazon-ads: + title: "Amazon Ads Spec" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." 
+ enum: + - "NA" + - "EU" + - "FE" + type: "string" + default: "NA" + order: 4 + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-10-10" + - "2022-10-22" + order: 5 + type: "string" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + items: + type: "string" + enum: + - "enabled" + - "paused" + - "archived" + type: "array" + uniqueItems: true + order: 8 + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + examples: + - 3 + - 10 + type: "integer" + default: 3 + order: 9 + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. 
Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + items: + type: "string" + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "array" + uniqueItems: true + order: 10 + sourceType: + title: "amazon-ads" + const: "amazon-ads" + enum: + - "amazon-ads" + order: 0 + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + source-amazon-ads-update: + title: "Amazon Ads Spec" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." + enum: + - "NA" + - "EU" + - "FE" + type: "string" + default: "NA" + order: 4 + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-10-10" + - "2022-10-22" + order: 5 + type: "string" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. 
The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + items: + type: "string" + enum: + - "enabled" + - "paused" + - "archived" + type: "array" + uniqueItems: true + order: 8 + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + examples: + - 3 + - 10 + type: "integer" + default: 3 + order: 9 + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + items: + type: "string" + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "array" + uniqueItems: true + order: 10 + required: + - "client_id" + - "client_secret" + - "refresh_token" + source-sendinblue: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your API Key. See here." 
+ title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "sendinblue" + const: "sendinblue" + enum: + - "sendinblue" + order: 0 + type: "string" + source-sendinblue-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Your API Key. See here." + title: "API Key" + airbyte_secret: true + order: 0 + source-github: + title: "GitHub Source Spec" + type: "object" + required: + - "credentials" + - "repositories" + - "sourceType" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to GitHub" + type: "object" + order: 0 + group: "auth" + oneOf: + - type: "object" + title: "OAuth" + required: + - "access_token" + properties: + option_title: + type: "string" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + access_token: + type: "string" + title: "Access Token" + description: "OAuth access token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + const: "PAT Credentials" + order: 0 + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Tokens" + description: + "Log into GitHub and then generate a personal access token. 
To load balance your API quota consumption\ + \ across multiple API tokens, input multiple tokens separated with\ + \ \",\"" + airbyte_secret: true + x-speakeasy-param-sensitive: true + repositories: + type: "array" + items: + type: "string" + pattern: "^[\\w.-]+/(([\\w.-]*\\*)|[\\w.-]+(?docs for more info" + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + order: 2 + format: "date-time" + api_url: + type: "string" + examples: + - "https://github.com" + - "https://github.company.org" + title: "API URL" + default: "https://api.github.com/" + description: + "Please enter your basic URL from self-hosted GitHub instance\ + \ or leave it empty to use GitHub." + order: 3 + branches: + type: "array" + items: + type: "string" + title: "Branches" + examples: + - "airbytehq/airbyte/master" + - "airbytehq/airbyte/my-branch" + description: + "List of GitHub repository branches to pull commits for, e.g.\ + \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\ + \ the default branch will be pulled." + order: 4 + pattern_descriptor: "org/repo/branch1 org/repo/branch2" + max_waiting_time: + type: "integer" + title: "Max Waiting Time (in minutes)" + examples: + - 10 + - 30 + - 60 + default: 10 + minimum: 1 + maximum: 60 + description: + "Max Waiting Time for rate limit. 
Set higher value to wait\ + \ till rate limits will be resetted to continue sync" + order: 5 + sourceType: + title: "github" + const: "github" + enum: + - "github" + order: 0 + type: "string" + source-github-update: + title: "GitHub Source Spec" + type: "object" + required: + - "credentials" + - "repositories" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to GitHub" + type: "object" + order: 0 + group: "auth" + oneOf: + - type: "object" + title: "OAuth" + required: + - "access_token" + properties: + option_title: + type: "string" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + access_token: + type: "string" + title: "Access Token" + description: "OAuth access token" + airbyte_secret: true + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + - type: "object" + title: "Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + const: "PAT Credentials" + order: 0 + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Tokens" + description: + "Log into GitHub and then generate a personal access token. 
To load balance your API quota consumption\ + \ across multiple API tokens, input multiple tokens separated with\ + \ \",\"" + airbyte_secret: true + repositories: + type: "array" + items: + type: "string" + pattern: "^[\\w.-]+/(([\\w.-]*\\*)|[\\w.-]+(?docs for more info" + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + order: 2 + format: "date-time" + api_url: + type: "string" + examples: + - "https://github.com" + - "https://github.company.org" + title: "API URL" + default: "https://api.github.com/" + description: + "Please enter your basic URL from self-hosted GitHub instance\ + \ or leave it empty to use GitHub." + order: 3 + branches: + type: "array" + items: + type: "string" + title: "Branches" + examples: + - "airbytehq/airbyte/master" + - "airbytehq/airbyte/my-branch" + description: + "List of GitHub repository branches to pull commits for, e.g.\ + \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\ + \ the default branch will be pulled." + order: 4 + pattern_descriptor: "org/repo/branch1 org/repo/branch2" + max_waiting_time: + type: "integer" + title: "Max Waiting Time (in minutes)" + examples: + - 10 + - 30 + - 60 + default: 10 + minimum: 1 + maximum: 60 + description: + "Max Waiting Time for rate limit. 
Set higher value to wait\ + \ till rate limits will be resetted to continue sync" + order: 5 + source-guru: + type: "object" + required: + - "username" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + team_id: + type: "string" + description: + "Team ID received through response of /teams streams, make\ + \ sure about access to the team" + order: 3 + title: "team_id" + search_cards_query: + type: "string" + description: "Query for searching cards" + order: 4 + title: "search_cards_query" + sourceType: + title: "guru" + const: "guru" + enum: + - "guru" + order: 0 + type: "string" + source-guru-update: + type: "object" + required: + - "username" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + team_id: + type: "string" + description: + "Team ID received through response of /teams streams, make\ + \ sure about access to the team" + order: 3 + title: "team_id" + search_cards_query: + type: "string" + description: "Query for searching cards" + order: 4 + title: "search_cards_query" + source-bigquery: + title: "BigQuery Source Spec" + type: "object" + required: + - "project_id" + - "credentials_json" + - "sourceType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." 
+ title: "Project ID" + dataset_id: + type: "string" + description: + "The dataset ID to search for tables and views. If you are\ + \ only loading data from one dataset, setting this option could result\ + \ in much faster schema discovery." + title: "Default Dataset ID" + credentials_json: + type: "string" + description: + "The contents of your Service Account Key JSON file. See the\ + \ docs for more information on how to obtain this key." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "bigquery" + const: "bigquery" + enum: + - "bigquery" + order: 0 + type: "string" + source-bigquery-update: + title: "BigQuery Source Spec" + type: "object" + required: + - "project_id" + - "credentials_json" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + dataset_id: + type: "string" + description: + "The dataset ID to search for tables and views. If you are\ + \ only loading data from one dataset, setting this option could result\ + \ in much faster schema discovery." + title: "Default Dataset ID" + credentials_json: + type: "string" + description: + "The contents of your Service Account Key JSON file. See the\ + \ docs for more information on how to obtain this key." + title: "Credentials JSON" + airbyte_secret: true + source-vantage: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API Access token. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "vantage" + const: "vantage" + enum: + - "vantage" + order: 0 + type: "string" + source-vantage-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API Access token. See here." 
+ airbyte_secret: true + order: 0 + source-calendly: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Go to Integrations → API & Webhooks to obtain your bearer\ + \ token. https://calendly.com/integrations/api_webhooks" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "calendly" + const: "calendly" + enum: + - "calendly" + order: 0 + type: "string" + source-calendly-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "Go to Integrations → API & Webhooks to obtain your bearer\ + \ token. https://calendly.com/integrations/api_webhooks" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-picqer: + type: "object" + required: + - "username" + - "organization_name" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + title: "Username" + order: 0 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + organization_name: + type: "string" + description: "The organization name which is used to login to picqer" + title: "Organization Name" + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "picqer" + const: "picqer" + enum: + - "picqer" + order: 0 + type: "string" + source-picqer-update: + type: "object" + required: + - "username" + - "organization_name" + - "start_date" + properties: + 
username: + type: "string" + title: "Username" + order: 0 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 1 + organization_name: + type: "string" + description: "The organization name which is used to login to picqer" + title: "Organization Name" + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + source-firebolt: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + x-speakeasy-param-sensitive: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + sourceType: + title: "firebolt" + const: "firebolt" + enum: + - "firebolt" + order: 0 + type: "string" + source-firebolt-update: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." 
+ airbyte_secret: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + source-clazar: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "clazar" + const: "clazar" + enum: + - "clazar" + order: 0 + type: "string" + source-clazar-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + source-outreach: + title: "Source Outreach Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "redirect_uri" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Outreach developer application." + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Outreach developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining the new access token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + redirect_uri: + type: "string" + title: "Redirect URI" + description: + "A Redirect URI is the location where the authorization server\ + \ sends the user once the app has been successfully authorized and granted\ + \ an authorization code or access token." + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Outreach\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "outreach" + const: "outreach" + enum: + - "outreach" + order: 0 + type: "string" + source-outreach-update: + title: "Source Outreach Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "redirect_uri" + - "start_date" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Outreach developer application." + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Outreach developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining the new access token." + airbyte_secret: true + redirect_uri: + type: "string" + title: "Redirect URI" + description: + "A Redirect URI is the location where the authorization server\ + \ sends the user once the app has been successfully authorized and granted\ + \ an authorization code or access token." + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Outreach\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-pokeapi: + type: "object" + required: + - "pokemon_name" + - "sourceType" + properties: + pokemon_name: + type: "string" + description: "Pokemon requested from the API." + title: "Pokemon Name" + pattern: "^[a-z0-9_\\-]+$" + enum: + - "bulbasaur" + - "ivysaur" + - "venusaur" + - "charmander" + - "charmeleon" + - "charizard" + - "squirtle" + - "wartortle" + - "blastoise" + - "caterpie" + - "metapod" + - "butterfree" + - "weedle" + - "kakuna" + - "beedrill" + - "pidgey" + - "pidgeotto" + - "pidgeot" + - "rattata" + - "raticate" + - "spearow" + - "fearow" + - "ekans" + - "arbok" + - "pikachu" + - "raichu" + - "sandshrew" + - "sandslash" + - "nidoranf" + - "nidorina" + - "nidoqueen" + - "nidoranm" + - "nidorino" + - "nidoking" + - "clefairy" + - "clefable" + - "vulpix" + - "ninetales" + - "jigglypuff" + - "wigglytuff" + - "zubat" + - "golbat" + - "oddish" + - "gloom" + - "vileplume" + - "paras" + - "parasect" + - "venonat" + - "venomoth" + - "diglett" + - "dugtrio" + - "meowth" + - "persian" + - "psyduck" + - "golduck" + - "mankey" + - "primeape" + - "growlithe" + - "arcanine" + - "poliwag" + - "poliwhirl" + - "poliwrath" + - "abra" + - "kadabra" + - "alakazam" + - "machop" + - "machoke" + - "machamp" + - "bellsprout" + - "weepinbell" + - "victreebel" + - "tentacool" + - "tentacruel" + - "geodude" + - "graveler" + - "golem" + - "ponyta" + - "rapidash" + - "slowpoke" + - "slowbro" + - "magnemite" + - "magneton" + - "farfetchd" + - "doduo" + - "dodrio" + - "seel" + - "dewgong" + - "grimer" + - "muk" + - "shellder" + - "cloyster" + - "gastly" + - "haunter" + - "gengar" + - "onix" + - "drowzee" + - "hypno" + - "krabby" + - "kingler" + - "voltorb" + - "electrode" + - "exeggcute" + - "exeggutor" + - "cubone" + - "marowak" + - "hitmonlee" + - "hitmonchan" + - "lickitung" + - "koffing" + - "weezing" + - "rhyhorn" + - "rhydon" + - "chansey" + - "tangela" + - 
"kangaskhan" + - "horsea" + - "seadra" + - "goldeen" + - "seaking" + - "staryu" + - "starmie" + - "mrmime" + - "scyther" + - "jynx" + - "electabuzz" + - "magmar" + - "pinsir" + - "tauros" + - "magikarp" + - "gyarados" + - "lapras" + - "ditto" + - "eevee" + - "vaporeon" + - "jolteon" + - "flareon" + - "porygon" + - "omanyte" + - "omastar" + - "kabuto" + - "kabutops" + - "aerodactyl" + - "snorlax" + - "articuno" + - "zapdos" + - "moltres" + - "dratini" + - "dragonair" + - "dragonite" + - "mewtwo" + - "mew" + - "chikorita" + - "bayleef" + - "meganium" + - "cyndaquil" + - "quilava" + - "typhlosion" + - "totodile" + - "croconaw" + - "feraligatr" + - "sentret" + - "furret" + - "hoothoot" + - "noctowl" + - "ledyba" + - "ledian" + - "spinarak" + - "ariados" + - "crobat" + - "chinchou" + - "lanturn" + - "pichu" + - "cleffa" + - "igglybuff" + - "togepi" + - "togetic" + - "natu" + - "xatu" + - "mareep" + - "flaaffy" + - "ampharos" + - "bellossom" + - "marill" + - "azumarill" + - "sudowoodo" + - "politoed" + - "hoppip" + - "skiploom" + - "jumpluff" + - "aipom" + - "sunkern" + - "sunflora" + - "yanma" + - "wooper" + - "quagsire" + - "espeon" + - "umbreon" + - "murkrow" + - "slowking" + - "misdreavus" + - "unown" + - "wobbuffet" + - "girafarig" + - "pineco" + - "forretress" + - "dunsparce" + - "gligar" + - "steelix" + - "snubbull" + - "granbull" + - "qwilfish" + - "scizor" + - "shuckle" + - "heracross" + - "sneasel" + - "teddiursa" + - "ursaring" + - "slugma" + - "magcargo" + - "swinub" + - "piloswine" + - "corsola" + - "remoraid" + - "octillery" + - "delibird" + - "mantine" + - "skarmory" + - "houndour" + - "houndoom" + - "kingdra" + - "phanpy" + - "donphan" + - "porygon2" + - "stantler" + - "smeargle" + - "tyrogue" + - "hitmontop" + - "smoochum" + - "elekid" + - "magby" + - "miltank" + - "blissey" + - "raikou" + - "entei" + - "suicune" + - "larvitar" + - "pupitar" + - "tyranitar" + - "lugia" + - "ho-oh" + - "celebi" + - "treecko" + - "grovyle" + - "sceptile" + - "torchic" + - 
"combusken" + - "blaziken" + - "mudkip" + - "marshtomp" + - "swampert" + - "poochyena" + - "mightyena" + - "zigzagoon" + - "linoone" + - "wurmple" + - "silcoon" + - "beautifly" + - "cascoon" + - "dustox" + - "lotad" + - "lombre" + - "ludicolo" + - "seedot" + - "nuzleaf" + - "shiftry" + - "taillow" + - "swellow" + - "wingull" + - "pelipper" + - "ralts" + - "kirlia" + - "gardevoir" + - "surskit" + - "masquerain" + - "shroomish" + - "breloom" + - "slakoth" + - "vigoroth" + - "slaking" + - "nincada" + - "ninjask" + - "shedinja" + - "whismur" + - "loudred" + - "exploud" + - "makuhita" + - "hariyama" + - "azurill" + - "nosepass" + - "skitty" + - "delcatty" + - "sableye" + - "mawile" + - "aron" + - "lairon" + - "aggron" + - "meditite" + - "medicham" + - "electrike" + - "manectric" + - "plusle" + - "minun" + - "volbeat" + - "illumise" + - "roselia" + - "gulpin" + - "swalot" + - "carvanha" + - "sharpedo" + - "wailmer" + - "wailord" + - "numel" + - "camerupt" + - "torkoal" + - "spoink" + - "grumpig" + - "spinda" + - "trapinch" + - "vibrava" + - "flygon" + - "cacnea" + - "cacturne" + - "swablu" + - "altaria" + - "zangoose" + - "seviper" + - "lunatone" + - "solrock" + - "barboach" + - "whiscash" + - "corphish" + - "crawdaunt" + - "baltoy" + - "claydol" + - "lileep" + - "cradily" + - "anorith" + - "armaldo" + - "feebas" + - "milotic" + - "castform" + - "kecleon" + - "shuppet" + - "banette" + - "duskull" + - "dusclops" + - "tropius" + - "chimecho" + - "absol" + - "wynaut" + - "snorunt" + - "glalie" + - "spheal" + - "sealeo" + - "walrein" + - "clamperl" + - "huntail" + - "gorebyss" + - "relicanth" + - "luvdisc" + - "bagon" + - "shelgon" + - "salamence" + - "beldum" + - "metang" + - "metagross" + - "regirock" + - "regice" + - "registeel" + - "latias" + - "latios" + - "kyogre" + - "groudon" + - "rayquaza" + - "jirachi" + - "deoxys" + - "turtwig" + - "grotle" + - "torterra" + - "chimchar" + - "monferno" + - "infernape" + - "piplup" + - "prinplup" + - "empoleon" + - "starly" + - 
"staravia" + - "staraptor" + - "bidoof" + - "bibarel" + - "kricketot" + - "kricketune" + - "shinx" + - "luxio" + - "luxray" + - "budew" + - "roserade" + - "cranidos" + - "rampardos" + - "shieldon" + - "bastiodon" + - "burmy" + - "wormadam" + - "mothim" + - "combee" + - "vespiquen" + - "pachirisu" + - "buizel" + - "floatzel" + - "cherubi" + - "cherrim" + - "shellos" + - "gastrodon" + - "ambipom" + - "drifloon" + - "drifblim" + - "buneary" + - "lopunny" + - "mismagius" + - "honchkrow" + - "glameow" + - "purugly" + - "chingling" + - "stunky" + - "skuntank" + - "bronzor" + - "bronzong" + - "bonsly" + - "mimejr" + - "happiny" + - "chatot" + - "spiritomb" + - "gible" + - "gabite" + - "garchomp" + - "munchlax" + - "riolu" + - "lucario" + - "hippopotas" + - "hippowdon" + - "skorupi" + - "drapion" + - "croagunk" + - "toxicroak" + - "carnivine" + - "finneon" + - "lumineon" + - "mantyke" + - "snover" + - "abomasnow" + - "weavile" + - "magnezone" + - "lickilicky" + - "rhyperior" + - "tangrowth" + - "electivire" + - "magmortar" + - "togekiss" + - "yanmega" + - "leafeon" + - "glaceon" + - "gliscor" + - "mamoswine" + - "porygon-z" + - "gallade" + - "probopass" + - "dusknoir" + - "froslass" + - "rotom" + - "uxie" + - "mesprit" + - "azelf" + - "dialga" + - "palkia" + - "heatran" + - "regigigas" + - "giratina" + - "cresselia" + - "phione" + - "manaphy" + - "darkrai" + - "shaymin" + - "arceus" + - "victini" + - "snivy" + - "servine" + - "serperior" + - "tepig" + - "pignite" + - "emboar" + - "oshawott" + - "dewott" + - "samurott" + - "patrat" + - "watchog" + - "lillipup" + - "herdier" + - "stoutland" + - "purrloin" + - "liepard" + - "pansage" + - "simisage" + - "pansear" + - "simisear" + - "panpour" + - "simipour" + - "munna" + - "musharna" + - "pidove" + - "tranquill" + - "unfezant" + - "blitzle" + - "zebstrika" + - "roggenrola" + - "boldore" + - "gigalith" + - "woobat" + - "swoobat" + - "drilbur" + - "excadrill" + - "audino" + - "timburr" + - "gurdurr" + - "conkeldurr" + - "tympole" 
+ - "palpitoad" + - "seismitoad" + - "throh" + - "sawk" + - "sewaddle" + - "swadloon" + - "leavanny" + - "venipede" + - "whirlipede" + - "scolipede" + - "cottonee" + - "whimsicott" + - "petilil" + - "lilligant" + - "basculin" + - "sandile" + - "krokorok" + - "krookodile" + - "darumaka" + - "darmanitan" + - "maractus" + - "dwebble" + - "crustle" + - "scraggy" + - "scrafty" + - "sigilyph" + - "yamask" + - "cofagrigus" + - "tirtouga" + - "carracosta" + - "archen" + - "archeops" + - "trubbish" + - "garbodor" + - "zorua" + - "zoroark" + - "minccino" + - "cinccino" + - "gothita" + - "gothorita" + - "gothitelle" + - "solosis" + - "duosion" + - "reuniclus" + - "ducklett" + - "swanna" + - "vanillite" + - "vanillish" + - "vanilluxe" + - "deerling" + - "sawsbuck" + - "emolga" + - "karrablast" + - "escavalier" + - "foongus" + - "amoonguss" + - "frillish" + - "jellicent" + - "alomomola" + - "joltik" + - "galvantula" + - "ferroseed" + - "ferrothorn" + - "klink" + - "klang" + - "klinklang" + - "tynamo" + - "eelektrik" + - "eelektross" + - "elgyem" + - "beheeyem" + - "litwick" + - "lampent" + - "chandelure" + - "axew" + - "fraxure" + - "haxorus" + - "cubchoo" + - "beartic" + - "cryogonal" + - "shelmet" + - "accelgor" + - "stunfisk" + - "mienfoo" + - "mienshao" + - "druddigon" + - "golett" + - "golurk" + - "pawniard" + - "bisharp" + - "bouffalant" + - "rufflet" + - "braviary" + - "vullaby" + - "mandibuzz" + - "heatmor" + - "durant" + - "deino" + - "zweilous" + - "hydreigon" + - "larvesta" + - "volcarona" + - "cobalion" + - "terrakion" + - "virizion" + - "tornadus" + - "thundurus" + - "reshiram" + - "zekrom" + - "landorus" + - "kyurem" + - "keldeo" + - "meloetta" + - "genesect" + - "chespin" + - "quilladin" + - "chesnaught" + - "fennekin" + - "braixen" + - "delphox" + - "froakie" + - "frogadier" + - "greninja" + - "bunnelby" + - "diggersby" + - "fletchling" + - "fletchinder" + - "talonflame" + - "scatterbug" + - "spewpa" + - "vivillon" + - "litleo" + - "pyroar" + - "flabebe" + - 
"floette" + - "florges" + - "skiddo" + - "gogoat" + - "pancham" + - "pangoro" + - "furfrou" + - "espurr" + - "meowstic" + - "honedge" + - "doublade" + - "aegislash" + - "spritzee" + - "aromatisse" + - "swirlix" + - "slurpuff" + - "inkay" + - "malamar" + - "binacle" + - "barbaracle" + - "skrelp" + - "dragalge" + - "clauncher" + - "clawitzer" + - "helioptile" + - "heliolisk" + - "tyrunt" + - "tyrantrum" + - "amaura" + - "aurorus" + - "sylveon" + - "hawlucha" + - "dedenne" + - "carbink" + - "goomy" + - "sliggoo" + - "goodra" + - "klefki" + - "phantump" + - "trevenant" + - "pumpkaboo" + - "gourgeist" + - "bergmite" + - "avalugg" + - "noibat" + - "noivern" + - "xerneas" + - "yveltal" + - "zygarde" + - "diancie" + - "hoopa" + - "volcanion" + - "rowlet" + - "dartrix" + - "decidueye" + - "litten" + - "torracat" + - "incineroar" + - "popplio" + - "brionne" + - "primarina" + - "pikipek" + - "trumbeak" + - "toucannon" + - "yungoos" + - "gumshoos" + - "grubbin" + - "charjabug" + - "vikavolt" + - "crabrawler" + - "crabominable" + - "oricorio" + - "cutiefly" + - "ribombee" + - "rockruff" + - "lycanroc" + - "wishiwashi" + - "mareanie" + - "toxapex" + - "mudbray" + - "mudsdale" + - "dewpider" + - "araquanid" + - "fomantis" + - "lurantis" + - "morelull" + - "shiinotic" + - "salandit" + - "salazzle" + - "stufful" + - "bewear" + - "bounsweet" + - "steenee" + - "tsareena" + - "comfey" + - "oranguru" + - "passimian" + - "wimpod" + - "golisopod" + - "sandygast" + - "palossand" + - "pyukumuku" + - "typenull" + - "silvally" + - "minior" + - "komala" + - "turtonator" + - "togedemaru" + - "mimikyu" + - "bruxish" + - "drampa" + - "dhelmise" + - "jangmo-o" + - "hakamo-o" + - "kommo-o" + - "tapukoko" + - "tapulele" + - "tapubulu" + - "tapufini" + - "cosmog" + - "cosmoem" + - "solgaleo" + - "lunala" + - "nihilego" + - "buzzwole" + - "pheromosa" + - "xurkitree" + - "celesteela" + - "kartana" + - "guzzlord" + - "necrozma" + - "magearna" + - "marshadow" + - "poipole" + - "naganadel" + - 
"stakataka" + - "blacephalon" + - "zeraora" + - "meltan" + - "melmetal" + - "grookey" + - "thwackey" + - "rillaboom" + - "scorbunny" + - "raboot" + - "cinderace" + - "sobble" + - "drizzile" + - "inteleon" + - "skwovet" + - "greedent" + - "rookidee" + - "corvisquire" + - "corviknight" + - "blipbug" + - "dottler" + - "orbeetle" + - "nickit" + - "thievul" + - "gossifleur" + - "eldegoss" + - "wooloo" + - "dubwool" + - "chewtle" + - "drednaw" + - "yamper" + - "boltund" + - "rolycoly" + - "carkol" + - "coalossal" + - "applin" + - "flapple" + - "appletun" + - "silicobra" + - "sandaconda" + - "cramorant" + - "arrokuda" + - "barraskewda" + - "toxel" + - "toxtricity" + - "sizzlipede" + - "centiskorch" + - "clobbopus" + - "grapploct" + - "sinistea" + - "polteageist" + - "hatenna" + - "hattrem" + - "hatterene" + - "impidimp" + - "morgrem" + - "grimmsnarl" + - "obstagoon" + - "perrserker" + - "cursola" + - "sirfetchd" + - "mrrime" + - "runerigus" + - "milcery" + - "alcremie" + - "falinks" + - "pincurchin" + - "snom" + - "frosmoth" + - "stonjourner" + - "eiscue" + - "indeedee" + - "morpeko" + - "cufant" + - "copperajah" + - "dracozolt" + - "arctozolt" + - "dracovish" + - "arctovish" + - "duraludon" + - "dreepy" + - "drakloak" + - "dragapult" + - "zacian" + - "zamazenta" + - "eternatus" + - "kubfu" + - "urshifu" + - "zarude" + - "regieleki" + - "regidrago" + - "glastrier" + - "spectrier" + - "calyrex" + examples: + - "ditto" + - "luxray" + - "snorlax" + order: 0 + sourceType: + title: "pokeapi" + const: "pokeapi" + enum: + - "pokeapi" + order: 0 + type: "string" + source-pokeapi-update: + type: "object" + required: + - "pokemon_name" + properties: + pokemon_name: + type: "string" + description: "Pokemon requested from the API." 
+ title: "Pokemon Name" + pattern: "^[a-z0-9_\\-]+$" + enum: + - "bulbasaur" + - "ivysaur" + - "venusaur" + - "charmander" + - "charmeleon" + - "charizard" + - "squirtle" + - "wartortle" + - "blastoise" + - "caterpie" + - "metapod" + - "butterfree" + - "weedle" + - "kakuna" + - "beedrill" + - "pidgey" + - "pidgeotto" + - "pidgeot" + - "rattata" + - "raticate" + - "spearow" + - "fearow" + - "ekans" + - "arbok" + - "pikachu" + - "raichu" + - "sandshrew" + - "sandslash" + - "nidoranf" + - "nidorina" + - "nidoqueen" + - "nidoranm" + - "nidorino" + - "nidoking" + - "clefairy" + - "clefable" + - "vulpix" + - "ninetales" + - "jigglypuff" + - "wigglytuff" + - "zubat" + - "golbat" + - "oddish" + - "gloom" + - "vileplume" + - "paras" + - "parasect" + - "venonat" + - "venomoth" + - "diglett" + - "dugtrio" + - "meowth" + - "persian" + - "psyduck" + - "golduck" + - "mankey" + - "primeape" + - "growlithe" + - "arcanine" + - "poliwag" + - "poliwhirl" + - "poliwrath" + - "abra" + - "kadabra" + - "alakazam" + - "machop" + - "machoke" + - "machamp" + - "bellsprout" + - "weepinbell" + - "victreebel" + - "tentacool" + - "tentacruel" + - "geodude" + - "graveler" + - "golem" + - "ponyta" + - "rapidash" + - "slowpoke" + - "slowbro" + - "magnemite" + - "magneton" + - "farfetchd" + - "doduo" + - "dodrio" + - "seel" + - "dewgong" + - "grimer" + - "muk" + - "shellder" + - "cloyster" + - "gastly" + - "haunter" + - "gengar" + - "onix" + - "drowzee" + - "hypno" + - "krabby" + - "kingler" + - "voltorb" + - "electrode" + - "exeggcute" + - "exeggutor" + - "cubone" + - "marowak" + - "hitmonlee" + - "hitmonchan" + - "lickitung" + - "koffing" + - "weezing" + - "rhyhorn" + - "rhydon" + - "chansey" + - "tangela" + - "kangaskhan" + - "horsea" + - "seadra" + - "goldeen" + - "seaking" + - "staryu" + - "starmie" + - "mrmime" + - "scyther" + - "jynx" + - "electabuzz" + - "magmar" + - "pinsir" + - "tauros" + - "magikarp" + - "gyarados" + - "lapras" + - "ditto" + - "eevee" + - "vaporeon" + - "jolteon" + - 
"flareon" + - "porygon" + - "omanyte" + - "omastar" + - "kabuto" + - "kabutops" + - "aerodactyl" + - "snorlax" + - "articuno" + - "zapdos" + - "moltres" + - "dratini" + - "dragonair" + - "dragonite" + - "mewtwo" + - "mew" + - "chikorita" + - "bayleef" + - "meganium" + - "cyndaquil" + - "quilava" + - "typhlosion" + - "totodile" + - "croconaw" + - "feraligatr" + - "sentret" + - "furret" + - "hoothoot" + - "noctowl" + - "ledyba" + - "ledian" + - "spinarak" + - "ariados" + - "crobat" + - "chinchou" + - "lanturn" + - "pichu" + - "cleffa" + - "igglybuff" + - "togepi" + - "togetic" + - "natu" + - "xatu" + - "mareep" + - "flaaffy" + - "ampharos" + - "bellossom" + - "marill" + - "azumarill" + - "sudowoodo" + - "politoed" + - "hoppip" + - "skiploom" + - "jumpluff" + - "aipom" + - "sunkern" + - "sunflora" + - "yanma" + - "wooper" + - "quagsire" + - "espeon" + - "umbreon" + - "murkrow" + - "slowking" + - "misdreavus" + - "unown" + - "wobbuffet" + - "girafarig" + - "pineco" + - "forretress" + - "dunsparce" + - "gligar" + - "steelix" + - "snubbull" + - "granbull" + - "qwilfish" + - "scizor" + - "shuckle" + - "heracross" + - "sneasel" + - "teddiursa" + - "ursaring" + - "slugma" + - "magcargo" + - "swinub" + - "piloswine" + - "corsola" + - "remoraid" + - "octillery" + - "delibird" + - "mantine" + - "skarmory" + - "houndour" + - "houndoom" + - "kingdra" + - "phanpy" + - "donphan" + - "porygon2" + - "stantler" + - "smeargle" + - "tyrogue" + - "hitmontop" + - "smoochum" + - "elekid" + - "magby" + - "miltank" + - "blissey" + - "raikou" + - "entei" + - "suicune" + - "larvitar" + - "pupitar" + - "tyranitar" + - "lugia" + - "ho-oh" + - "celebi" + - "treecko" + - "grovyle" + - "sceptile" + - "torchic" + - "combusken" + - "blaziken" + - "mudkip" + - "marshtomp" + - "swampert" + - "poochyena" + - "mightyena" + - "zigzagoon" + - "linoone" + - "wurmple" + - "silcoon" + - "beautifly" + - "cascoon" + - "dustox" + - "lotad" + - "lombre" + - "ludicolo" + - "seedot" + - "nuzleaf" + - "shiftry" + - 
"taillow" + - "swellow" + - "wingull" + - "pelipper" + - "ralts" + - "kirlia" + - "gardevoir" + - "surskit" + - "masquerain" + - "shroomish" + - "breloom" + - "slakoth" + - "vigoroth" + - "slaking" + - "nincada" + - "ninjask" + - "shedinja" + - "whismur" + - "loudred" + - "exploud" + - "makuhita" + - "hariyama" + - "azurill" + - "nosepass" + - "skitty" + - "delcatty" + - "sableye" + - "mawile" + - "aron" + - "lairon" + - "aggron" + - "meditite" + - "medicham" + - "electrike" + - "manectric" + - "plusle" + - "minun" + - "volbeat" + - "illumise" + - "roselia" + - "gulpin" + - "swalot" + - "carvanha" + - "sharpedo" + - "wailmer" + - "wailord" + - "numel" + - "camerupt" + - "torkoal" + - "spoink" + - "grumpig" + - "spinda" + - "trapinch" + - "vibrava" + - "flygon" + - "cacnea" + - "cacturne" + - "swablu" + - "altaria" + - "zangoose" + - "seviper" + - "lunatone" + - "solrock" + - "barboach" + - "whiscash" + - "corphish" + - "crawdaunt" + - "baltoy" + - "claydol" + - "lileep" + - "cradily" + - "anorith" + - "armaldo" + - "feebas" + - "milotic" + - "castform" + - "kecleon" + - "shuppet" + - "banette" + - "duskull" + - "dusclops" + - "tropius" + - "chimecho" + - "absol" + - "wynaut" + - "snorunt" + - "glalie" + - "spheal" + - "sealeo" + - "walrein" + - "clamperl" + - "huntail" + - "gorebyss" + - "relicanth" + - "luvdisc" + - "bagon" + - "shelgon" + - "salamence" + - "beldum" + - "metang" + - "metagross" + - "regirock" + - "regice" + - "registeel" + - "latias" + - "latios" + - "kyogre" + - "groudon" + - "rayquaza" + - "jirachi" + - "deoxys" + - "turtwig" + - "grotle" + - "torterra" + - "chimchar" + - "monferno" + - "infernape" + - "piplup" + - "prinplup" + - "empoleon" + - "starly" + - "staravia" + - "staraptor" + - "bidoof" + - "bibarel" + - "kricketot" + - "kricketune" + - "shinx" + - "luxio" + - "luxray" + - "budew" + - "roserade" + - "cranidos" + - "rampardos" + - "shieldon" + - "bastiodon" + - "burmy" + - "wormadam" + - "mothim" + - "combee" + - "vespiquen" + - 
"pachirisu" + - "buizel" + - "floatzel" + - "cherubi" + - "cherrim" + - "shellos" + - "gastrodon" + - "ambipom" + - "drifloon" + - "drifblim" + - "buneary" + - "lopunny" + - "mismagius" + - "honchkrow" + - "glameow" + - "purugly" + - "chingling" + - "stunky" + - "skuntank" + - "bronzor" + - "bronzong" + - "bonsly" + - "mimejr" + - "happiny" + - "chatot" + - "spiritomb" + - "gible" + - "gabite" + - "garchomp" + - "munchlax" + - "riolu" + - "lucario" + - "hippopotas" + - "hippowdon" + - "skorupi" + - "drapion" + - "croagunk" + - "toxicroak" + - "carnivine" + - "finneon" + - "lumineon" + - "mantyke" + - "snover" + - "abomasnow" + - "weavile" + - "magnezone" + - "lickilicky" + - "rhyperior" + - "tangrowth" + - "electivire" + - "magmortar" + - "togekiss" + - "yanmega" + - "leafeon" + - "glaceon" + - "gliscor" + - "mamoswine" + - "porygon-z" + - "gallade" + - "probopass" + - "dusknoir" + - "froslass" + - "rotom" + - "uxie" + - "mesprit" + - "azelf" + - "dialga" + - "palkia" + - "heatran" + - "regigigas" + - "giratina" + - "cresselia" + - "phione" + - "manaphy" + - "darkrai" + - "shaymin" + - "arceus" + - "victini" + - "snivy" + - "servine" + - "serperior" + - "tepig" + - "pignite" + - "emboar" + - "oshawott" + - "dewott" + - "samurott" + - "patrat" + - "watchog" + - "lillipup" + - "herdier" + - "stoutland" + - "purrloin" + - "liepard" + - "pansage" + - "simisage" + - "pansear" + - "simisear" + - "panpour" + - "simipour" + - "munna" + - "musharna" + - "pidove" + - "tranquill" + - "unfezant" + - "blitzle" + - "zebstrika" + - "roggenrola" + - "boldore" + - "gigalith" + - "woobat" + - "swoobat" + - "drilbur" + - "excadrill" + - "audino" + - "timburr" + - "gurdurr" + - "conkeldurr" + - "tympole" + - "palpitoad" + - "seismitoad" + - "throh" + - "sawk" + - "sewaddle" + - "swadloon" + - "leavanny" + - "venipede" + - "whirlipede" + - "scolipede" + - "cottonee" + - "whimsicott" + - "petilil" + - "lilligant" + - "basculin" + - "sandile" + - "krokorok" + - "krookodile" + - 
"darumaka" + - "darmanitan" + - "maractus" + - "dwebble" + - "crustle" + - "scraggy" + - "scrafty" + - "sigilyph" + - "yamask" + - "cofagrigus" + - "tirtouga" + - "carracosta" + - "archen" + - "archeops" + - "trubbish" + - "garbodor" + - "zorua" + - "zoroark" + - "minccino" + - "cinccino" + - "gothita" + - "gothorita" + - "gothitelle" + - "solosis" + - "duosion" + - "reuniclus" + - "ducklett" + - "swanna" + - "vanillite" + - "vanillish" + - "vanilluxe" + - "deerling" + - "sawsbuck" + - "emolga" + - "karrablast" + - "escavalier" + - "foongus" + - "amoonguss" + - "frillish" + - "jellicent" + - "alomomola" + - "joltik" + - "galvantula" + - "ferroseed" + - "ferrothorn" + - "klink" + - "klang" + - "klinklang" + - "tynamo" + - "eelektrik" + - "eelektross" + - "elgyem" + - "beheeyem" + - "litwick" + - "lampent" + - "chandelure" + - "axew" + - "fraxure" + - "haxorus" + - "cubchoo" + - "beartic" + - "cryogonal" + - "shelmet" + - "accelgor" + - "stunfisk" + - "mienfoo" + - "mienshao" + - "druddigon" + - "golett" + - "golurk" + - "pawniard" + - "bisharp" + - "bouffalant" + - "rufflet" + - "braviary" + - "vullaby" + - "mandibuzz" + - "heatmor" + - "durant" + - "deino" + - "zweilous" + - "hydreigon" + - "larvesta" + - "volcarona" + - "cobalion" + - "terrakion" + - "virizion" + - "tornadus" + - "thundurus" + - "reshiram" + - "zekrom" + - "landorus" + - "kyurem" + - "keldeo" + - "meloetta" + - "genesect" + - "chespin" + - "quilladin" + - "chesnaught" + - "fennekin" + - "braixen" + - "delphox" + - "froakie" + - "frogadier" + - "greninja" + - "bunnelby" + - "diggersby" + - "fletchling" + - "fletchinder" + - "talonflame" + - "scatterbug" + - "spewpa" + - "vivillon" + - "litleo" + - "pyroar" + - "flabebe" + - "floette" + - "florges" + - "skiddo" + - "gogoat" + - "pancham" + - "pangoro" + - "furfrou" + - "espurr" + - "meowstic" + - "honedge" + - "doublade" + - "aegislash" + - "spritzee" + - "aromatisse" + - "swirlix" + - "slurpuff" + - "inkay" + - "malamar" + - "binacle" + - 
"barbaracle" + - "skrelp" + - "dragalge" + - "clauncher" + - "clawitzer" + - "helioptile" + - "heliolisk" + - "tyrunt" + - "tyrantrum" + - "amaura" + - "aurorus" + - "sylveon" + - "hawlucha" + - "dedenne" + - "carbink" + - "goomy" + - "sliggoo" + - "goodra" + - "klefki" + - "phantump" + - "trevenant" + - "pumpkaboo" + - "gourgeist" + - "bergmite" + - "avalugg" + - "noibat" + - "noivern" + - "xerneas" + - "yveltal" + - "zygarde" + - "diancie" + - "hoopa" + - "volcanion" + - "rowlet" + - "dartrix" + - "decidueye" + - "litten" + - "torracat" + - "incineroar" + - "popplio" + - "brionne" + - "primarina" + - "pikipek" + - "trumbeak" + - "toucannon" + - "yungoos" + - "gumshoos" + - "grubbin" + - "charjabug" + - "vikavolt" + - "crabrawler" + - "crabominable" + - "oricorio" + - "cutiefly" + - "ribombee" + - "rockruff" + - "lycanroc" + - "wishiwashi" + - "mareanie" + - "toxapex" + - "mudbray" + - "mudsdale" + - "dewpider" + - "araquanid" + - "fomantis" + - "lurantis" + - "morelull" + - "shiinotic" + - "salandit" + - "salazzle" + - "stufful" + - "bewear" + - "bounsweet" + - "steenee" + - "tsareena" + - "comfey" + - "oranguru" + - "passimian" + - "wimpod" + - "golisopod" + - "sandygast" + - "palossand" + - "pyukumuku" + - "typenull" + - "silvally" + - "minior" + - "komala" + - "turtonator" + - "togedemaru" + - "mimikyu" + - "bruxish" + - "drampa" + - "dhelmise" + - "jangmo-o" + - "hakamo-o" + - "kommo-o" + - "tapukoko" + - "tapulele" + - "tapubulu" + - "tapufini" + - "cosmog" + - "cosmoem" + - "solgaleo" + - "lunala" + - "nihilego" + - "buzzwole" + - "pheromosa" + - "xurkitree" + - "celesteela" + - "kartana" + - "guzzlord" + - "necrozma" + - "magearna" + - "marshadow" + - "poipole" + - "naganadel" + - "stakataka" + - "blacephalon" + - "zeraora" + - "meltan" + - "melmetal" + - "grookey" + - "thwackey" + - "rillaboom" + - "scorbunny" + - "raboot" + - "cinderace" + - "sobble" + - "drizzile" + - "inteleon" + - "skwovet" + - "greedent" + - "rookidee" + - "corvisquire" + - 
"corviknight" + - "blipbug" + - "dottler" + - "orbeetle" + - "nickit" + - "thievul" + - "gossifleur" + - "eldegoss" + - "wooloo" + - "dubwool" + - "chewtle" + - "drednaw" + - "yamper" + - "boltund" + - "rolycoly" + - "carkol" + - "coalossal" + - "applin" + - "flapple" + - "appletun" + - "silicobra" + - "sandaconda" + - "cramorant" + - "arrokuda" + - "barraskewda" + - "toxel" + - "toxtricity" + - "sizzlipede" + - "centiskorch" + - "clobbopus" + - "grapploct" + - "sinistea" + - "polteageist" + - "hatenna" + - "hattrem" + - "hatterene" + - "impidimp" + - "morgrem" + - "grimmsnarl" + - "obstagoon" + - "perrserker" + - "cursola" + - "sirfetchd" + - "mrrime" + - "runerigus" + - "milcery" + - "alcremie" + - "falinks" + - "pincurchin" + - "snom" + - "frosmoth" + - "stonjourner" + - "eiscue" + - "indeedee" + - "morpeko" + - "cufant" + - "copperajah" + - "dracozolt" + - "arctozolt" + - "dracovish" + - "arctovish" + - "duraludon" + - "dreepy" + - "drakloak" + - "dragapult" + - "zacian" + - "zamazenta" + - "eternatus" + - "kubfu" + - "urshifu" + - "zarude" + - "regieleki" + - "regidrago" + - "glastrier" + - "spectrier" + - "calyrex" + examples: + - "ditto" + - "luxray" + - "snorlax" + order: 0 + source-senseforce: + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API access token. See here. The toke is case sensitive." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: + "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. 
(Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their ULR)" + order: 1 + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: + "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + order: 2 + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: + "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + format: "date" + order: 4 + sourceType: + title: "senseforce" + const: "senseforce" + enum: + - "senseforce" + order: 0 + type: "string" + source-senseforce-update: + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API access token. See here. The toke is case sensitive." + airbyte_secret: true + order: 0 + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: + "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. 
(Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their ULR)" + order: 1 + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: + "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + order: 2 + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: + "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + format: "date" + order: 4 + source-freshsales: + type: "object" + required: + - "domain_name" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshsales API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain_name: + type: "string" + order: 0 + title: "Domain Name" + examples: + - "mydomain.myfreshworks.com" + description: "The Name of your Freshsales domain" + sourceType: + title: "freshsales" + const: "freshsales" + enum: + - "freshsales" + order: 0 + type: "string" + source-freshsales-update: + type: "object" + required: + - "domain_name" + - "api_key" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshsales API Key. See here. The key is case sensitive." 
+ airbyte_secret: true + domain_name: + type: "string" + order: 0 + title: "Domain Name" + examples: + - "mydomain.myfreshworks.com" + description: "The Name of your Freshsales domain" + source-hubplanner: + title: "Hubplanner Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Hubplanner API key. See https://github.com/hubplanner/API#authentication\ + \ for more details." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "hubplanner" + const: "hubplanner" + enum: + - "hubplanner" + order: 0 + type: "string" + source-hubplanner-update: + title: "Hubplanner Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Hubplanner API key. See https://github.com/hubplanner/API#authentication\ + \ for more details." + airbyte_secret: true + source-square: + title: "Square Spec" + type: "object" + required: + - "is_sandbox" + - "sourceType" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to Square." 
+ type: "object" + order: 0 + oneOf: + - title: "Oauth authentication" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API key" + type: "object" + required: + - "auth_type" + - "api_key" + properties: + auth_type: + type: "string" + const: "API Key" + order: 1 + enum: + - "API Key" + api_key: + type: "string" + title: "API key token" + description: "The API key for a Square application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + is_sandbox: + type: "boolean" + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + default: false + order: 1 + start_date: + type: "string" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If not set, all data will be replicated." 
+ title: "Start Date" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + format: "date" + include_deleted_objects: + type: "boolean" + description: + "In some streams there is an option to include deleted objects\ + \ (Items, Categories, Discounts, Taxes)" + title: "Include Deleted Objects" + default: false + order: 3 + sourceType: + title: "square" + const: "square" + enum: + - "square" + order: 0 + type: "string" + source-square-update: + title: "Square Spec" + type: "object" + required: + - "is_sandbox" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to Square." + type: "object" + order: 0 + oneOf: + - title: "Oauth authentication" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + - title: "API key" + type: "object" + required: + - "auth_type" + - "api_key" + properties: + auth_type: + type: "string" + const: "API Key" + order: 1 + enum: + - "API Key" + api_key: + type: "string" + title: "API key token" + description: "The API key for a Square application" + airbyte_secret: true + is_sandbox: + type: "boolean" + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + default: false + order: 1 + start_date: + type: "string" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. 
If not set, all data will be replicated." + title: "Start Date" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + format: "date" + include_deleted_objects: + type: "boolean" + description: + "In some streams there is an option to include deleted objects\ + \ (Items, Categories, Discounts, Taxes)" + title: "Include Deleted Objects" + default: false + order: 3 + source-paystack: + type: "object" + required: + - "start_date" + - "secret_key" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + order: 0 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set, the connector will always reload data from the past\ + \ N days, where N is the value set here. This is useful if your data is\ + \ updated after creation." + order: 1 + secret_key: + type: "string" + title: "Secret Key" + pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$" + description: + "The Paystack API key (usually starts with 'sk_live_'; find\ + \ yours here)." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "paystack" + const: "paystack" + enum: + - "paystack" + order: 0 + type: "string" + source-paystack-update: + type: "object" + required: + - "start_date" + - "secret_key" + properties: + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + order: 0 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set, the connector will always reload data from the past\ + \ N days, where N is the value set here. This is useful if your data is\ + \ updated after creation." + order: 1 + secret_key: + type: "string" + title: "Secret Key" + pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$" + description: + "The Paystack API key (usually starts with 'sk_live_'; find\ + \ yours here)." + airbyte_secret: true + order: 2 + source-redshift: + title: "Redshift Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "sourceType" + properties: + host: + title: "Host" + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)." + type: "string" + order: 1 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + order: 2 + database: + title: "Database" + description: "Name of the database." + type: "string" + examples: + - "master" + order: 3 + schemas: + title: "Schemas" + description: + "The list of schemas to sync from. Specify one or more explicitly\ + \ or keep empty to process all schemas. Schema names are case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + examples: + - "public" + order: 4 + username: + title: "Username" + description: "Username to use to access the database." + type: "string" + order: 5 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 6 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + sourceType: + title: "redshift" + const: "redshift" + enum: + - "redshift" + order: 0 + type: "string" + source-redshift-update: + title: "Redshift Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + properties: + host: + title: "Host" + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)." + type: "string" + order: 1 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + order: 2 + database: + title: "Database" + description: "Name of the database." + type: "string" + examples: + - "master" + order: 3 + schemas: + title: "Schemas" + description: + "The list of schemas to sync from. Specify one or more explicitly\ + \ or keep empty to process all schemas. Schema names are case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + examples: + - "public" + order: 4 + username: + title: "Username" + description: "Username to use to access the database." + type: "string" + order: 5 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 7 + source-productive: + type: "object" + required: + - "api_key" + - "organization_id" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: + "The organization ID which could be seen from `https://app.productive.io/xxxx-xxxx/settings/api-integrations`\ + \ page" + order: 1 + title: "Organization ID" + sourceType: + title: "productive" + const: "productive" + enum: + - "productive" + order: 0 + type: "string" + source-productive-update: + type: "object" + required: + - "api_key" + - "organization_id" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + organization_id: + type: "string" + description: + "The organization ID which could be seen from `https://app.productive.io/xxxx-xxxx/settings/api-integrations`\ + \ page" + order: 1 + title: "Organization ID" + source-survicate: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "survicate" + const: "survicate" + enum: + - "survicate" + order: 0 + type: "string" + source-survicate-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-braintree: + title: "Braintree Spec" + type: "object" + properties: + merchant_id: + title: "Merchant ID" + description: + "The unique identifier for your entire gateway 
account. See\ + \ the docs for more information on how to obtain this ID." + name: "Merchant ID" + type: "string" + public_key: + title: "Public Key" + description: + "Braintree Public Key. See the docs for more information on how to obtain this key." + name: "Public Key" + type: "string" + private_key: + title: "Private Key" + description: + "Braintree Private Key. See the docs for more information on how to obtain this key." + name: "Private Key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + name: "Start Date" + examples: + - "2020" + - "2020-12-30" + - "2020-11-22 20:20:05" + type: "string" + format: "date-time" + environment: + title: "Environment" + description: "Environment specifies where the data will come from." + name: "Environment" + examples: + - "sandbox" + - "production" + - "qa" + - "development" + enum: + - "Development" + - "Sandbox" + - "Qa" + - "Production" + type: "string" + sourceType: + title: "braintree" + const: "braintree" + enum: + - "braintree" + order: 0 + type: "string" + required: + - "merchant_id" + - "public_key" + - "private_key" + - "environment" + - "sourceType" + source-braintree-update: + title: "Braintree Spec" + type: "object" + properties: + merchant_id: + title: "Merchant ID" + description: + "The unique identifier for your entire gateway account. See\ + \ the docs for more information on how to obtain this ID." + name: "Merchant ID" + type: "string" + public_key: + title: "Public Key" + description: + "Braintree Public Key. See the docs for more information on how to obtain this key." + name: "Public Key" + type: "string" + private_key: + title: "Private Key" + description: + "Braintree Private Key. See the docs for more information on how to obtain this key." 
+ name: "Private Key" + airbyte_secret: true + type: "string" + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + name: "Start Date" + examples: + - "2020" + - "2020-12-30" + - "2020-11-22 20:20:05" + type: "string" + format: "date-time" + environment: + title: "Environment" + description: "Environment specifies where the data will come from." + name: "Environment" + examples: + - "sandbox" + - "production" + - "qa" + - "development" + enum: + - "Development" + - "Sandbox" + - "Qa" + - "Production" + type: "string" + required: + - "merchant_id" + - "public_key" + - "private_key" + - "environment" + source-mailchimp: + title: "Mailchimp Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + type: "object" + title: "Authentication" + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "auth_type" + - "access_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "An access token generated using the above client ID\ + \ and secret." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Key" + required: + - "auth_type" + - "apikey" + properties: + auth_type: + type: "string" + const: "apikey" + order: 1 + enum: + - "apikey" + apikey: + type: "string" + title: "API Key" + description: + "Mailchimp API Key. See the docs for information on how to generate this key." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Incremental Sync Start Date" + description: + "The date from which you want to start syncing data for Incremental\ + \ streams. Only records that have been created or modified since this\ + \ date will be synced. If left blank, all data will by synced." + type: "string" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-01-01T00:00:00.000Z" + sourceType: + title: "mailchimp" + const: "mailchimp" + enum: + - "mailchimp" + order: 0 + type: "string" + source-mailchimp-update: + title: "Mailchimp Spec" + type: "object" + required: [] + properties: + credentials: + type: "object" + title: "Authentication" + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "auth_type" + - "access_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "An access token generated using the above client ID\ + \ and secret." + airbyte_secret: true + - type: "object" + title: "API Key" + required: + - "auth_type" + - "apikey" + properties: + auth_type: + type: "string" + const: "apikey" + order: 1 + enum: + - "apikey" + apikey: + type: "string" + title: "API Key" + description: + "Mailchimp API Key. See the docs for information on how to generate this key." + airbyte_secret: true + start_date: + title: "Incremental Sync Start Date" + description: + "The date from which you want to start syncing data for Incremental\ + \ streams. 
Only records that have been created or modified since this\ + \ date will be synced. If left blank, all data will by synced." + type: "string" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-01-01T00:00:00.000Z" + source-airtable: + title: "Airtable Source Spec" + type: "object" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Personal Access Token" + type: "object" + required: + - "api_key" + properties: + auth_method: + type: "string" + const: "api_key" + enum: + - "api_key" + api_key: + type: "string" + description: + "The Personal Access Token for the Airtable account.\ + \ See the Support Guide for more information on how to obtain this token." 
+ title: "Personal Access Token" + airbyte_secret: true + examples: + - "key1234567890" + x-speakeasy-param-sensitive: true + sourceType: + title: "airtable" + const: "airtable" + enum: + - "airtable" + order: 0 + type: "string" + source-airtable-update: + title: "Airtable Source Spec" + type: "object" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + - title: "Personal Access Token" + type: "object" + required: + - "api_key" + properties: + auth_method: + type: "string" + const: "api_key" + enum: + - "api_key" + api_key: + type: "string" + description: + "The Personal Access Token for the Airtable account.\ + \ See the Support Guide for more information on how to obtain this token." + title: "Personal Access Token" + airbyte_secret: true + examples: + - "key1234567890" + source-mssql: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "sourceType" + properties: + host: + description: "The hostname of the database." 
+ title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." 
+ required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 0 + certificate: + title: "Certificate" + type: "string" + description: + "certificate of the server, or of the CA that signed\ + \ the server certificate" + order: 1 + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 3600 + order: 3 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. 
If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 4 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 5 + min: 1000 + max: 10000 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 6 + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + source-mssql-update: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + properties: + host: + description: "The hostname of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 0 + certificate: + title: "Certificate" + type: "string" + description: + "certificate of the server, or of the CA that signed\ + \ the server certificate" + order: 1 + airbyte_secret: true + multiline: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." 
+ required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 3600 + order: 3 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 4 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 5 + min: 1000 + max: 10000 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 6 + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." 
+ required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-dynamodb: + title: "Dynamodb Source Spec" + type: "object" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Access Keys" + type: + - "null" + - "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "User" + order: 0 + enum: + - "User" + access_key_id: + order: 1 + title: "Dynamodb Key Id" + type: "string" + description: + "The access key id to access Dynamodb. 
Airbyte requires\ + \ read permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + x-speakeasy-param-sensitive: true + secret_access_key: + order: 2 + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + x-speakeasy-param-sensitive: true + - type: "object" + title: "Role Based Authentication" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Role" + order: 0 + enum: + - "Role" + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + reserved_attribute_names: + title: "Reserved attribute names" + type: "string" + description: "Comma separated reserved attribute names present in your tables" + airbyte_secret: true + examples: + - "name, field_name, field-name" + x-speakeasy-param-sensitive: true + ignore_missing_read_permissions_tables: + title: "Ignore missing read permissions tables" + type: "boolean" + description: "Ignore tables with missing scan/read permissions" + default: false + sourceType: + title: "dynamodb" + 
const: "dynamodb" + enum: + - "dynamodb" + order: 0 + type: "string" + source-dynamodb-update: + title: "Dynamodb Source Spec" + type: "object" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Access Keys" + type: + - "null" + - "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "User" + order: 0 + enum: + - "User" + access_key_id: + order: 1 + title: "Dynamodb Key Id" + type: "string" + description: + "The access key id to access Dynamodb. Airbyte requires\ + \ read permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + order: 2 + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - type: "object" + title: "Role Based Authentication" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Role" + order: 0 + enum: + - "Role" + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - 
"us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + reserved_attribute_names: + title: "Reserved attribute names" + type: "string" + description: "Comma separated reserved attribute names present in your tables" + airbyte_secret: true + examples: + - "name, field_name, field-name" + ignore_missing_read_permissions_tables: + title: "Ignore missing read permissions tables" + type: "boolean" + description: "Ignore tables with missing scan/read permissions" + default: false + source-kissmetrics: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "kissmetrics" + const: "kissmetrics" + enum: + - "kissmetrics" + order: 0 + type: "string" + source-kissmetrics-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + source-salesforce: + title: "Salesforce Source Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + is_sandbox: + title: "Sandbox" + description: + "Toggle if you're using a Salesforce Sandbox" + type: "boolean" + default: false + order: 1 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Enter your application's Salesforce Refresh 
Token used for Airbyte to access your Salesforce\ + \ account." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\ + \ format. Airbyte will replicate the data updated on and after this date.\ + \ If this field is blank, Airbyte will replicate the data for last two\ + \ years." + type: "string" + pattern: "^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?)$" + pattern_descriptor: "YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ" + examples: + - "2021-07-25" + - "2021-07-25T00:00:00Z" + format: "date-time" + order: 5 + force_use_bulk_api: + title: "Force to use BULK API" + type: "boolean" + description: + "Toggle to use Bulk API (this might cause empty fields for\ + \ some streams)" + default: false + order: 6 + stream_slice_step: + title: "Stream Slice Step for Incremental sync" + type: "string" + description: "The size of the time window (ISO8601 duration) to slice requests." + default: "P30D" + order: 7 + examples: + - "PT12H" + - "P7D" + - "P30D" + - "P1M" + - "P1Y" + streams_criteria: + type: "array" + order: 8 + items: + type: "object" + required: + - "criteria" + - "value" + properties: + criteria: + type: "string" + title: "Search criteria" + enum: + - "starts with" + - "ends with" + - "contains" + - "exacts" + - "starts not with" + - "ends not with" + - "not contains" + - "not exacts" + order: 1 + default: "contains" + value: + type: "string" + title: "Search value" + order: 2 + title: "Filter Salesforce Objects" + description: + "Add filters to select only required stream based on `SObject`\ + \ name. 
Use this field to filter which tables are displayed by this connector.\ + \ This is useful if your Salesforce account has a large number of tables\ + \ (>1000), in which case you may find it easier to navigate the UI and\ + \ speed up the connector's performance if you restrict the tables displayed\ + \ by this connector." + sourceType: + title: "salesforce" + const: "salesforce" + enum: + - "salesforce" + order: 0 + type: "string" + source-salesforce-update: + title: "Salesforce Source Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + is_sandbox: + title: "Sandbox" + description: + "Toggle if you're using a Salesforce Sandbox" + type: "boolean" + default: false + order: 1 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + refresh_token: + title: "Refresh Token" + description: + "Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce\ + \ account." + type: "string" + airbyte_secret: true + order: 4 + start_date: + title: "Start Date" + description: + "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\ + \ format. Airbyte will replicate the data updated on and after this date.\ + \ If this field is blank, Airbyte will replicate the data for last two\ + \ years." 
+ type: "string" + pattern: "^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?)$" + pattern_descriptor: "YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ" + examples: + - "2021-07-25" + - "2021-07-25T00:00:00Z" + format: "date-time" + order: 5 + force_use_bulk_api: + title: "Force to use BULK API" + type: "boolean" + description: + "Toggle to use Bulk API (this might cause empty fields for\ + \ some streams)" + default: false + order: 6 + stream_slice_step: + title: "Stream Slice Step for Incremental sync" + type: "string" + description: "The size of the time window (ISO8601 duration) to slice requests." + default: "P30D" + order: 7 + examples: + - "PT12H" + - "P7D" + - "P30D" + - "P1M" + - "P1Y" + streams_criteria: + type: "array" + order: 8 + items: + type: "object" + required: + - "criteria" + - "value" + properties: + criteria: + type: "string" + title: "Search criteria" + enum: + - "starts with" + - "ends with" + - "contains" + - "exacts" + - "starts not with" + - "ends not with" + - "not contains" + - "not exacts" + order: 1 + default: "contains" + value: + type: "string" + title: "Search value" + order: 2 + title: "Filter Salesforce Objects" + description: + "Add filters to select only required stream based on `SObject`\ + \ name. Use this field to filter which tables are displayed by this connector.\ + \ This is useful if your Salesforce account has a large number of tables\ + \ (>1000), in which case you may find it easier to navigate the UI and\ + \ speed up the connector's performance if you restrict the tables displayed\ + \ by this connector." + source-clickhouse: + title: "ClickHouse Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "sourceType" + properties: + host: + description: "The host endpoint of the Clickhouse cluster." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." 
+ title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "default" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with this username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 6 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "clickhouse" + const: "clickhouse" + enum: + - "clickhouse" + order: 0 + type: "string" + source-clickhouse-update: + title: "ClickHouse Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + description: "The host endpoint of the Clickhouse cluster." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "default" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with this username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." 
+ type: "boolean" + default: true + order: 6 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-postmarkapp: + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + - "sourceType" + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + x-speakeasy-param-sensitive: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "postmarkapp" + const: "postmarkapp" + enum: + - "postmarkapp" + order: 0 + type: 
"string" + source-postmarkapp-update: + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + source-bitly: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "bitly" + const: "bitly" + enum: + - "bitly" + order: 0 + type: "string" + source-bitly-update: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-hardcoded-records: + title: "Hardcoded Records Source Spec" + type: "object" + required: + - "sourceType" + properties: + count: + title: "Count" + description: "How many records per stream should be generated" + type: "integer" + minimum: 1 + default: 1000 + order: 0 + sourceType: + title: "hardcoded-records" + const: "hardcoded-records" + enum: + - "hardcoded-records" + order: 0 + type: 
"string" + source-hardcoded-records-update: + title: "Hardcoded Records Source Spec" + type: "object" + required: [] + properties: + count: + title: "Count" + description: "How many records per stream should be generated" + type: "integer" + minimum: 1 + default: 1000 + order: 0 + source-faker: + title: "Faker Source Spec" + type: "object" + required: + - "sourceType" + properties: + count: + title: "Count" + description: + "How many users should be generated in total. The purchases\ + \ table will be scaled to match, with 10 purchases created per 10 users.\ + \ This setting does not apply to the products stream." + type: "integer" + minimum: 1 + default: 1000 + order: 0 + seed: + title: "Seed" + description: + "Manually control the faker random seed to return the same\ + \ values on subsequent runs (leave -1 for random)" + type: "integer" + default: -1 + order: 1 + records_per_slice: + title: "Records Per Stream Slice" + description: + "How many fake records will be in each page (stream slice),\ + \ before a state message is emitted?" + type: "integer" + minimum: 1 + default: 1000 + order: 2 + always_updated: + title: "Always Updated" + description: + "Should the updated_at values for every record be new each\ + \ sync? Setting this to false will case the source to stop emitting records\ + \ after COUNT records have been emitted." + type: "boolean" + default: true + parallelism: + title: "Parallelism" + description: + "How many parallel workers should we use to generate fake data?\ + \ Choose a value equal to the number of CPUs you will allocate to this\ + \ source." + type: "integer" + minimum: 1 + default: 4 + order: 4 + sourceType: + title: "faker" + const: "faker" + enum: + - "faker" + order: 0 + type: "string" + source-faker-update: + title: "Faker Source Spec" + type: "object" + required: [] + properties: + count: + title: "Count" + description: + "How many users should be generated in total. 
The purchases\ + \ table will be scaled to match, with 10 purchases created per 10 users.\ + \ This setting does not apply to the products stream." + type: "integer" + minimum: 1 + default: 1000 + order: 0 + seed: + title: "Seed" + description: + "Manually control the faker random seed to return the same\ + \ values on subsequent runs (leave -1 for random)" + type: "integer" + default: -1 + order: 1 + records_per_slice: + title: "Records Per Stream Slice" + description: + "How many fake records will be in each page (stream slice),\ + \ before a state message is emitted?" + type: "integer" + minimum: 1 + default: 1000 + order: 2 + always_updated: + title: "Always Updated" + description: + "Should the updated_at values for every record be new each\ + \ sync? Setting this to false will case the source to stop emitting records\ + \ after COUNT records have been emitted." + type: "boolean" + default: true + parallelism: + title: "Parallelism" + description: + "How many parallel workers should we use to generate fake data?\ + \ Choose a value equal to the number of CPUs you will allocate to this\ + \ source." + type: "integer" + minimum: 1 + default: 4 + order: 4 + source-lever-hiring: + title: "Lever Hiring Source Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + order: 3 + title: "Authentication Mechanism" + description: "Choose how to authenticate to Lever Hiring." + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Lever (OAuth)" + required: + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate via Lever (Api Key)" + required: + - "api_key" + properties: + auth_type: + type: "string" + const: "Api Key" + order: 0 + enum: + - "Api Key" + api_key: + title: "Api key" + type: "string" + description: "The Api Key of your Lever Hiring account." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + order: 0 + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. Note that it will be used\ + \ only in the following incremental streams: comments, commits, and issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + environment: + order: 1 + type: "string" + title: "Environment" + description: + "The environment in which you'd like to replicate data for\ + \ Lever. This is used to determine which Lever API endpoint to use." + default: "Sandbox" + enum: + - "Production" + - "Sandbox" + sourceType: + title: "lever-hiring" + const: "lever-hiring" + enum: + - "lever-hiring" + order: 0 + type: "string" + source-lever-hiring-update: + title: "Lever Hiring Source Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + order: 3 + title: "Authentication Mechanism" + description: "Choose how to authenticate to Lever Hiring." + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Lever (OAuth)" + required: + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." 
+ client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining new access token." + airbyte_secret: true + - type: "object" + title: "Authenticate via Lever (Api Key)" + required: + - "api_key" + properties: + auth_type: + type: "string" + const: "Api Key" + order: 0 + enum: + - "Api Key" + api_key: + title: "Api key" + type: "string" + description: "The Api Key of your Lever Hiring account." + airbyte_secret: true + order: 1 + start_date: + order: 0 + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. Note that it will be used\ + \ only in the following incremental streams: comments, commits, and issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + environment: + order: 1 + type: "string" + title: "Environment" + description: + "The environment in which you'd like to replicate data for\ + \ Lever. This is used to determine which Lever API endpoint to use." 
+ default: "Sandbox" + enum: + - "Production" + - "Sandbox" + source-braze: + title: "Braze Spec" + type: "object" + required: + - "url" + - "api_key" + - "start_date" + - "sourceType" + properties: + url: + type: "string" + title: "URL" + description: "Braze REST API endpoint" + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Braze REST API key" + x-speakeasy-param-sensitive: true + start_date: + type: "string" + format: "date" + title: "Start date" + description: "Rows after this date will be synced" + sourceType: + title: "braze" + const: "braze" + enum: + - "braze" + order: 0 + type: "string" + source-braze-update: + title: "Braze Spec" + type: "object" + required: + - "url" + - "api_key" + - "start_date" + properties: + url: + type: "string" + title: "URL" + description: "Braze REST API endpoint" + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Braze REST API key" + start_date: + type: "string" + format: "date" + title: "Start date" + description: "Rows after this date will be synced" + source-sftp: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + - "sourceType" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: "Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: 
+ title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." + type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + sourceType: + title: "sftp" + const: "sftp" + enum: + - "sftp" + order: 0 + type: "string" + source-sftp-update: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: 
"Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." + type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + source-google-drive: + title: "Google Drive Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + folder_url: + title: "Folder Url" + description: + "URL for the folder you want to sync. Using individual streams\ + \ and glob patterns, it's possible to only sync a subset of all files\ + \ located in the folder." 
+ examples: + - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn" + order: 0 + pattern: "^https://drive.google.com/.+" + pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID" + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Google Drive API" + type: "object" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + service_account_info: + title: "Service Account Information" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." 
+ airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "service_account_info" + - "auth_type" + sourceType: + title: "google-drive" + const: "google-drive" + enum: + - "google-drive" + order: 0 + type: "string" + required: + - "streams" + - "folder_url" + - "credentials" + - "sourceType" + source-google-drive-update: + title: "Google Drive Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." 
+ default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." 
+ default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + folder_url: + title: "Folder Url" + description: + "URL for the folder you want to sync. Using individual streams\ + \ and glob patterns, it's possible to only sync a subset of all files\ + \ located in the folder." 
+ examples: + - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn" + order: 0 + pattern: "^https://drive.google.com/.+" + pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID" + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Google Drive API" + type: "object" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token for the Google Drive API" + airbyte_secret: true + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + service_account_info: + title: "Service Account Information" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + type: "string" + required: + - "service_account_info" + - "auth_type" + required: + - "streams" + - "folder_url" + - "credentials" + source-mailjet-sms: + type: "object" + required: + - "token" + - "sourceType" + properties: + end_date: + type: "integer" + title: "End date" + description: + "Retrieve SMS messages created before the specified timestamp.\ + \ Required format - Unix timestamp." 
+ pattern: "^[0-9]*$" + examples: + - 1666281656 + order: 0 + start_date: + type: "integer" + title: "Start date" + description: + "Retrieve SMS messages created after the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666261656 + order: 1 + token: + type: "string" + title: "Access Token" + description: + "Your access token. See here." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "mailjet-sms" + const: "mailjet-sms" + enum: + - "mailjet-sms" + order: 0 + type: "string" + source-mailjet-sms-update: + type: "object" + required: + - "token" + properties: + end_date: + type: "integer" + title: "End date" + description: + "Retrieve SMS messages created before the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666281656 + order: 0 + start_date: + type: "integer" + title: "Start date" + description: + "Retrieve SMS messages created after the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666261656 + order: 1 + token: + type: "string" + title: "Access Token" + description: + "Your access token. See here." 
+ airbyte_secret: true + order: 2 + source-chameleon: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + filter: + type: "string" + description: "Filter for using in the `segments_experiences` stream" + enum: + - "tour" + - "survey" + - "launcher" + order: 3 + title: "Filter" + default: "tour" + end_date: + type: "string" + order: 4 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "chameleon" + const: "chameleon" + enum: + - "chameleon" + order: 0 + type: "string" + source-chameleon-update: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + filter: + type: "string" + description: "Filter for using in the `segments_experiences` stream" + enum: + - "tour" + - "survey" + - "launcher" + order: 3 + title: "Filter" + default: "tour" + end_date: + type: "string" + order: 4 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-gcs: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be\nmodified to uptake the 
changes because it is responsible for\ + \ converting\nlegacy GCS configs into file based configs using the File-Based\ + \ CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." 
+ type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." 
+ default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the 
`hi_res`\ + \ mode. This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + service_account: + title: "Service Account Information" + description: + "Enter your Google Cloud service account key in JSON format" + airbyte_secret: true + order: 0 + type: "string" + x-speakeasy-param-sensitive: true + bucket: + title: "Bucket" + description: "Name of the GCS bucket where the file(s) exist." + order: 2 + type: "string" + sourceType: + title: "gcs" + const: "gcs" + enum: + - "gcs" + order: 0 + type: "string" + required: + - "streams" + - "service_account" + - "bucket" + - "sourceType" + source-gcs-update: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be\nmodified to uptake the changes because it is responsible for\ + \ converting\nlegacy GCS configs into file based configs using the File-Based\ + \ CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the `hi_res`\ + \ mode. 
This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + service_account: + title: "Service Account Information" + description: + "Enter your Google Cloud service account key in JSON format" + airbyte_secret: true + order: 0 + type: "string" + bucket: + title: "Bucket" + description: "Name of the GCS bucket where the file(s) exist." 
+ order: 2 + type: "string" + required: + - "streams" + - "service_account" + - "bucket" + source-basecamp: + type: "object" + required: + - "account_id" + - "start_date" + - "client_id" + - "client_secret" + - "client_refresh_token_2" + - "sourceType" + properties: + account_id: + type: "number" + order: 0 + title: "Account ID" + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_id: + type: "string" + title: "Client ID" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + client_refresh_token_2: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "basecamp" + const: "basecamp" + enum: + - "basecamp" + order: 0 + type: "string" + source-basecamp-update: + type: "object" + required: + - "account_id" + - "start_date" + - "client_id" + - "client_secret" + - "client_refresh_token_2" + properties: + account_id: + type: "number" + order: 0 + title: "Account ID" + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_id: + type: "string" + title: "Client ID" + airbyte_secret: true + order: 2 + client_secret: + type: "string" + title: "Client secret" + airbyte_secret: true + order: 3 + client_refresh_token_2: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 4 + source-qualaroo: + title: "Qualaroo Spec" + type: "object" + required: + - "token" + - "key" + - "start_date" + - "sourceType" + properties: + token: + type: "string" + title: "API token" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + key: + type: "string" + title: "API key" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00.000Z" + survey_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all surveys to which you have access will be\ + \ replicated." + sourceType: + title: "qualaroo" + const: "qualaroo" + enum: + - "qualaroo" + order: 0 + type: "string" + source-qualaroo-update: + title: "Qualaroo Spec" + type: "object" + required: + - "token" + - "key" + - "start_date" + properties: + token: + type: "string" + title: "API token" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + key: + type: "string" + title: "API key" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00.000Z" + survey_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all surveys to which you have access will be\ + \ replicated." 
+ source-nytimes: + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + sourceType: + title: "nytimes" + const: "nytimes" + enum: + - "nytimes" + order: 0 + type: "string" + source-nytimes-update: + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used 
for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + source-greenhouse: + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "greenhouse" + const: "greenhouse" + enum: + - "greenhouse" + order: 0 + type: "string" + source-greenhouse-update: + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + order: 0 + source-front: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + page_limit: + type: "string" + description: "Page limit for the responses" + title: "Page limit" + default: "50" + order: 2 + sourceType: + title: "front" + const: "front" + enum: + - "front" + order: 0 + type: "string" + source-front-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + page_limit: + type: "string" + description: "Page limit for the responses" + title: "Page limit" + default: "50" + order: 2 + trello: + title: null + zendesk-chat: + properties: + credentials: + properties: + client_id: 
+ type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Zendesk Chat Spec" + google-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + title: "Google Ads Spec" + google-search-console: + properties: + authorization: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + title: "Google Search Console Spec" + shopify: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." 
+ airbyte_secret: true + order: 2 + title: "Shopify Source CDK Specifications" + retently: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + title: "Retently Api Spec" + instagram: + properties: + client_id: + title: "Client Id" + description: "The Client ID for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Instagram" + azure-blob-storage: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "SourceAzureBlobStorageSpec" + zendesk-sunshine: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: null + snapchat-marketing: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." 
+ airbyte_secret: true + order: 1 + title: "Snapchat Marketing Spec" + gitlab: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + title: "Source Gitlab Spec" + snowflake: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + title: "Snowflake Source Spec" + microsoft-sharepoint: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft SharePoint Source Spec" + smartsheets: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + title: "Smartsheets Source Spec" + notion: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." 
+ airbyte_secret: true + title: "Notion Source Spec" + slack: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." + airbyte_secret: true + title: "Slack Spec" + youtube-analytics: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + title: "YouTube Analytics Spec" + google-sheets: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." 
+ airbyte_secret: true + title: "Google Sheets Source Spec" + zendesk-talk: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + title: "Source Zendesk Talk Spec" + asana: + properties: + credentials: + properties: + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + title: "Asana Spec" + microsoft-teams: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Microsoft Teams developer application." + airbyte_secret: true + title: "Microsoft Teams Spec" + amazon-seller-partner: + properties: + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + title: "Amazon Seller Partner Spec" + linkedin-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." 
+ airbyte_secret: true + title: "Linkedin Ads Spec" + pinterest: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Pinterest Spec" + zendesk-support: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + title: "Source Zendesk Support Spec" + microsoft-onedrive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft OneDrive Source Spec" + tiktok-marketing: + properties: + credentials: + properties: + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + title: "TikTok Marketing Source Spec" + hubspot: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application. See\ + \ the Hubspot docs if you need help finding this ID." 
+ type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + title: "HubSpot Source Spec" + google-analytics-data-api: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Analytics developer application." + airbyte_secret: true + order: 2 + title: "Google Analytics (Data API) Spec" + intercom: + properties: + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + title: "Source Intercom Spec" + typeform: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." 
+ airbyte_secret: true + title: null + facebook-marketing: + properties: + credentials: + properties: + client_id: + title: "Client Id" + description: "The Client Id for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Facebook Marketing" + surveymonkey: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + title: null + bing-ads: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + title: "Bing Ads Spec" + monday: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Monday Spec" + amazon-ads: + properties: + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." 
+ order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + title: "Amazon Ads Spec" + github: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + title: "GitHub Source Spec" + square: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + title: "Square Spec" + mailchimp: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Mailchimp Spec" + airtable: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." 
+ airbyte_secret: true + title: "Airtable Source Spec" + salesforce: + properties: + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + title: "Salesforce Source Spec" + lever-hiring: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + title: "Lever Hiring Source Spec" + google-drive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + title: "Google Drive Source Spec" + destination-gcs: + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + - "format" + - "destinationType" + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + order: 1 + type: "string" + description: + "You can find the bucket name in the App Engine Admin console\ + \ Application Settings page, under the label Google Cloud Storage Bucket.\ + \ Read more here." + examples: + - "airbyte_sync" + gcs_bucket_path: + title: "GCS Bucket Path" + description: + "GCS Bucket Path string Subdirectory under the above bucket\ + \ to sync the data into." + order: 2 + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + order: 3 + default: "us" + description: + "Select a Region of the GCS Bucket. 
Read more here." + enum: + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "southamerica-east1" + - "southamerica-west1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "asia" + - "eu" + - "us" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Authentication" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read more\ + \ here." + type: "object" + order: 0 + oneOf: + - title: "HMAC Key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "When linked to a service account, this ID is 61 characters\ + \ long; when linked to a user account, it is 24 characters long.\ + \ Read more here." + title: "Access ID" + airbyte_secret: true + order: 0 + examples: + - "1234567890abcdefghij1234" + x-speakeasy-param-sensitive: true + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string. Read more here." + title: "Secret" + airbyte_secret: true + order: 1 + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + x-speakeasy-param-sensitive: true + format: + title: "Output Format" + type: "object" + description: + "Output data format. One of the following formats must be selected\ + \ - AVRO format, PARQUET format, CSV format, or JSONL format." 
+ order: 4 + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "The presets 0-3 are fast presets with medium compression.\ + \ The presets 4-6 are fairly slow presets with high compression.\ + \ The default preset is 6. The presets 7-9 are like the preset\ + \ 6 but use bigger dictionaries and have higher compressor\ + \ and decompressor memory requirements. Unless the uncompressed\ + \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\ + \ waste of memory to use the presets 7, 8, or 9, respectively.\ + \ Read more here for details." 
+ type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization" + description: + "Whether the input JSON data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." 
+ oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + default: "UNCOMPRESSED" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." 
+ type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + destinationType: + title: "gcs" + const: "gcs" + enum: + - "gcs" + order: 0 + type: "string" + destination-gcs-update: + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + - "format" + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + order: 1 + type: "string" + description: + "You can find the bucket name in the App Engine Admin console\ + \ Application Settings page, under the label Google Cloud Storage Bucket.\ + \ Read more here." + examples: + - "airbyte_sync" + gcs_bucket_path: + title: "GCS Bucket Path" + description: + "GCS Bucket Path string Subdirectory under the above bucket\ + \ to sync the data into." + order: 2 + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + order: 3 + default: "us" + description: + "Select a Region of the GCS Bucket. Read more here." 
+ enum: + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "southamerica-east1" + - "southamerica-west1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "asia" + - "eu" + - "us" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Authentication" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read more\ + \ here." + type: "object" + order: 0 + oneOf: + - title: "HMAC Key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "When linked to a service account, this ID is 61 characters\ + \ long; when linked to a user account, it is 24 characters long.\ + \ Read more here." + title: "Access ID" + airbyte_secret: true + order: 0 + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string. Read more here." + title: "Secret" + airbyte_secret: true + order: 1 + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + format: + title: "Output Format" + type: "object" + description: + "Output data format. One of the following formats must be selected\ + \ - AVRO format, PARQUET format, CSV format, or JSONL format." 
+ order: 4 + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "The presets 0-3 are fast presets with medium compression.\ + \ The presets 4-6 are fairly slow presets with high compression.\ + \ The default preset is 6. The presets 7-9 are like the preset\ + \ 6 but use bigger dictionaries and have higher compressor\ + \ and decompressor memory requirements. Unless the uncompressed\ + \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\ + \ waste of memory to use the presets 7, 8, or 9, respectively.\ + \ Read more here for details." 
+ type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization" + description: + "Whether the input JSON data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." 
+ oneOf: + - title: "No Compression" + required: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + required: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + required: ["compression_type"] + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + required: ["compression_type"] + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + default: "UNCOMPRESSED" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." 
+ type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + destination-clickhouse: + title: "ClickHouse Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "HTTP port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: false + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "clickhouse" + const: "clickhouse" + enum: + - "clickhouse" + order: 0 + type: "string" + destination-clickhouse-update: + title: "ClickHouse Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "HTTP port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: false + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-mssql: + title: "MS SQL Server Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "The host name of the MSSQL database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the MSSQL database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1433 + examples: + - "1433" + order: 1 + database: + title: "DB Name" + description: "The name of the MSSQL database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with this username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used to communicate with the\ + \ database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "The data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + destination-mssql-update: + title: "MS SQL Server Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "The host name of the MSSQL database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the MSSQL database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1433 + examples: + - "1433" + order: 1 + database: + title: "DB Name" + description: "The name of the MSSQL database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with this username." 
+ type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used to communicate with the\ + \ database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "The data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." 
+ order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-mysql: + title: "MySQL Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." 
+ type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 6 + raw_data_schema: + type: "string" + description: "The database to write raw tables into" + title: "Raw table database (defaults to airbyte_internal)" + order: 7 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mysql" + const: "mysql" + enum: + - "mysql" + order: 0 + type: "string" + destination-mysql-update: + title: "MySQL Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + raw_data_schema: + type: "string" + description: "The database to write raw tables into" + title: "Raw table database (defaults to airbyte_internal)" + order: 7 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-pubsub: + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + - "ordering_enabled" + - "batching_enabled" + - "destinationType" + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub." + title: "Project ID" + topic_id: + type: "string" + description: "The PubSub topic ID in the given GCP project ID." + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. 
Check out the\ + \ docs if you need help generating this key." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + ordering_enabled: + title: "Message Ordering Enabled" + description: + "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\ + \ of stream" + type: "boolean" + default: false + batching_enabled: + type: "boolean" + title: "Message Batching Enabled" + description: + "If TRUE messages will be buffered instead of sending them\ + \ one by one" + default: false + batching_delay_threshold: + type: "integer" + title: "Message Batching: Delay Threshold" + description: "Number of ms before the buffer is flushed" + default: 1 + minimum: 1 + batching_element_count_threshold: + type: "integer" + title: "Message Batching: Element Count Threshold" + description: "Number of messages before the buffer is flushed" + default: 1 + minimum: 1 + batching_request_bytes_threshold: + type: "integer" + title: "Message Batching: Request Bytes Threshold" + description: "Number of bytes before the buffer is flushed" + default: 1 + minimum: 1 + destinationType: + title: "pubsub" + const: "pubsub" + enum: + - "pubsub" + order: 0 + type: "string" + destination-pubsub-update: + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + - "ordering_enabled" + - "batching_enabled" + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub." + title: "Project ID" + topic_id: + type: "string" + description: "The PubSub topic ID in the given GCP project ID." + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key." 
+ title: "Credentials JSON" + airbyte_secret: true + ordering_enabled: + title: "Message Ordering Enabled" + description: + "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\ + \ of stream" + type: "boolean" + default: false + batching_enabled: + type: "boolean" + title: "Message Batching Enabled" + description: + "If TRUE messages will be buffered instead of sending them\ + \ one by one" + default: false + batching_delay_threshold: + type: "integer" + title: "Message Batching: Delay Threshold" + description: "Number of ms before the buffer is flushed" + default: 1 + minimum: 1 + batching_element_count_threshold: + type: "integer" + title: "Message Batching: Element Count Threshold" + description: "Number of messages before the buffer is flushed" + default: 1 + minimum: 1 + batching_request_bytes_threshold: + type: "integer" + title: "Message Batching: Request Bytes Threshold" + description: "Number of bytes before the buffer is flushed" + default: 1 + minimum: 1 + destination-weaviate: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "No external embedding" + type: "object" + properties: + mode: + title: "Mode" + default: "no_embedding" + const: "no_embedding" + enum: + - "no_embedding" + type: "string" + description: + "Do not calculate and pass embeddings to Weaviate. Suitable\ + \ for clusters with configured vectorizers to calculate embeddings within\ + \ Weaviate or for classes that should only support regular text search." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + - "mode" + description: + "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Weaviate cluster." 
+ order: 1 + examples: + - "https://my-cluster.weaviate.network" + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Weaviate instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Weaviate\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Weaviate cluster" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Weaviate cluster" + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Weaviate clusters)" + - title: "No Authentication" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + batch_size: + title: "Batch Size" + description: "The number of records to send to Weaviate in each batch" + default: 128 + type: "integer" + text_field: + title: "Text Field" + description: "The field in the object that contains the embedded text" + default: "text" + type: "string" + tenant_id: + title: "Tenant ID" + description: "The tenant ID to use for multi tenancy" + default: "" + airbyte_secret: 
true + type: "string" + x-speakeasy-param-sensitive: true + default_vectorizer: + title: "Default Vectorizer" + description: "The vectorizer to use if new classes need to be created" + default: "none" + enum: + - "none" + - "text2vec-cohere" + - "text2vec-huggingface" + - "text2vec-openai" + - "text2vec-palm" + - "text2vec-contextionary" + - "text2vec-transformers" + - "text2vec-gpt4all" + type: "string" + additional_headers: + title: "Additional headers" + description: "Additional HTTP headers to send with every request." + default: [] + examples: + - header_key: "X-OpenAI-Api-Key" + value: "my-openai-api-key" + type: "array" + items: + title: "Header" + type: "object" + properties: + header_key: + title: "Header Key" + type: "string" + value: + title: "Header Value" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "header_key" + - "value" + required: + - "host" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "weaviate" + const: "weaviate" + enum: + - "weaviate" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-weaviate-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ 
+ \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "No external embedding" + type: "object" + properties: + mode: + title: "Mode" + default: "no_embedding" + const: "no_embedding" + enum: + - "no_embedding" + type: "string" + description: + "Do not calculate and pass embeddings to Weaviate. Suitable\ + \ for clusters with configured vectorizers to calculate embeddings within\ + \ Weaviate or for classes that should only support regular text search." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + - "mode" + description: + "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Weaviate cluster." 
+ order: 1 + examples: + - "https://my-cluster.weaviate.network" + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Weaviate instance" + airbyte_secret: true + type: "string" + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Weaviate\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Weaviate cluster" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Weaviate cluster" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Weaviate clusters)" + - title: "No Authentication" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + batch_size: + title: "Batch Size" + description: "The number of records to send to Weaviate in each batch" + default: 128 + type: "integer" + text_field: + title: "Text Field" + description: "The field in the object that contains the embedded text" + default: "text" + type: "string" + tenant_id: + title: "Tenant ID" + description: "The tenant ID to use for multi tenancy" + default: "" + airbyte_secret: true + type: "string" + default_vectorizer: + title: "Default Vectorizer" 
+ description: "The vectorizer to use if new classes need to be created" + default: "none" + enum: + - "none" + - "text2vec-cohere" + - "text2vec-huggingface" + - "text2vec-openai" + - "text2vec-palm" + - "text2vec-contextionary" + - "text2vec-transformers" + - "text2vec-gpt4all" + type: "string" + additional_headers: + title: "Additional headers" + description: "Additional HTTP headers to send with every request." + default: [] + examples: + - header_key: "X-OpenAI-Api-Key" + value: "my-openai-api-key" + type: "array" + items: + title: "Header" + type: "object" + properties: + header_key: + title: "Header Key" + type: "string" + value: + title: "Header Value" + airbyte_secret: true + type: "string" + required: + - "header_key" + - "value" + required: + - "host" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-mongodb: + title: "MongoDB Destination Spec" + type: "object" + required: + - "database" + - "auth_type" + - "destinationType" + properties: + instance_type: + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + \ Set TLS connection is used by default." + title: "MongoDb Instance Type" + type: "object" + order: 0 + oneOf: + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "The Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "The Port of a Mongo database to be replicated." 
+ minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS Connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member separated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + auth_type: + title: "Authorization type" + type: "object" + description: "Authorization type." + oneOf: + - title: "None" + description: "None." + required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + enum: + - "none" + - title: "Login/Password" + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + enum: + - "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username."
+ type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mongodb" + const: "mongodb" + enum: + - "mongodb" + order: 0 + type: "string" + destination-mongodb-update: + title: "MongoDB Destination Spec" + type: "object" + required: + - "database" + - "auth_type" + properties: + instance_type: + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + \ Set TLS connection is used by default." 
+ title: "MongoDb Instance Type" + type: "object" + order: 0 + oneOf: + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "The Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "The Port of a Mongo database to be replicated." + minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS Connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member seperated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + auth_type: + title: "Authorization type" + type: "object" + description: "Authorization type." + oneOf: + - title: "None" + description: "None." 
+ required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + enum: + - "none" + - title: "Login/Password" + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + enum: + - "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-vectara: + title: "Vectara Config" + type: "object" + properties: + oauth2: + title: "OAuth2.0 Credentials" + type: "object" + properties: + client_id: + title: "OAuth Client ID" + description: "OAuth2.0 client id" + order: 0 + type: "string" + client_secret: + title: "OAuth Client Secret" + description: "OAuth2.0 client secret" + airbyte_secret: true + order: 1 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + description: + "OAuth2.0 credentials used to authenticate admin actions (creating/deleting\ + \ corpora)" + group: "auth" + customer_id: + title: 
"Customer ID" + description: "Your customer id as it is in the authenticaion url" + order: 2 + group: "account" + type: "string" + corpus_name: + title: "Corpus Name" + description: "The Name of Corpus to load data into" + order: 3 + group: "account" + type: "string" + parallelize: + title: "Parallelize" + description: "Parallelize indexing into Vectara with multiple threads" + default: false + always_show: true + group: "account" + type: "boolean" + text_fields: + title: "Text fields to index with Vectara" + description: + "List of fields in the record that should be in the section\ + \ of the document. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all fields\ + \ are considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + title_field: + title: "Text field to use as document title with Vectara" + description: + "A field that will be used to populate the `title` of each\ + \ document. The field list is applied to all streams in the same way and\ + \ non-existing fields are ignored. If none are defined, all fields are\ + \ considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." 
+ default: "" + always_show: true + examples: + - "document_key" + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as metadata.\ + \ The field list is applied to all streams in the same way and non-existing\ + \ fields are ignored. If none are defined, all fields are considered metadata\ + \ fields. When specifying text fields, you can access nested fields in\ + \ the record by using dot notation, e.g. `user.name` will access the `name`\ + \ field in the `user` object. It's also possible to use wildcards to access\ + \ all fields in an object, e.g. `users.*.name` will access all `names`\ + \ fields in all entries of the `users` array. When specifying nested paths,\ + \ all matching values are flattened into an array set to a field named\ + \ by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + type: "array" + items: + type: "string" + destinationType: + title: "vectara" + const: "vectara" + enum: + - "vectara" + order: 0 + type: "string" + required: + - "oauth2" + - "customer_id" + - "corpus_name" + - "destinationType" + description: "Configuration to connect to the Vectara instance" + groups: + - id: "account" + title: "Account" + - id: "auth" + title: "Authentication" + destination-vectara-update: + title: "Vectara Config" + type: "object" + properties: + oauth2: + title: "OAuth2.0 Credentials" + type: "object" + properties: + client_id: + title: "OAuth Client ID" + description: "OAuth2.0 client id" + order: 0 + type: "string" + client_secret: + title: "OAuth Client Secret" + description: "OAuth2.0 client secret" + airbyte_secret: true + order: 1 + type: "string" + required: + - "client_id" + - "client_secret" + description: + "OAuth2.0 credentials used to authenticate admin actions (creating/deleting\ + \ corpora)" + group: "auth" + customer_id: + title: "Customer ID" + description: "Your customer id as it is in the authenticaion url" + 
order: 2 + group: "account" + type: "string" + corpus_name: + title: "Corpus Name" + description: "The Name of Corpus to load data into" + order: 3 + group: "account" + type: "string" + parallelize: + title: "Parallelize" + description: "Parallelize indexing into Vectara with multiple threads" + default: false + always_show: true + group: "account" + type: "boolean" + text_fields: + title: "Text fields to index with Vectara" + description: + "List of fields in the record that should be in the section\ + \ of the document. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all fields\ + \ are considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + title_field: + title: "Text field to use as document title with Vectara" + description: + "A field that will be used to populate the `title` of each\ + \ document. The field list is applied to all streams in the same way and\ + \ non-existing fields are ignored. If none are defined, all fields are\ + \ considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." 
+ default: "" + always_show: true + examples: + - "document_key" + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as metadata.\ + \ The field list is applied to all streams in the same way and non-existing\ + \ fields are ignored. If none are defined, all fields are considered metadata\ + \ fields. When specifying text fields, you can access nested fields in\ + \ the record by using dot notation, e.g. `user.name` will access the `name`\ + \ field in the `user` object. It's also possible to use wildcards to access\ + \ all fields in an object, e.g. `users.*.name` will access all `names`\ + \ fields in all entries of the `users` array. When specifying nested paths,\ + \ all matching values are flattened into an array set to a field named\ + \ by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + type: "array" + items: + type: "string" + required: + - "oauth2" + - "customer_id" + - "corpus_name" + description: "Configuration to connect to the Vectara instance" + groups: + - id: "account" + title: "Account" + - id: "auth" + title: "Authentication" + destination-s3-glue: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + - "destinationType" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 4 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." 
+ oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "Root level flattening" + enum: + - "No flattening" + - "Root level flattening" + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: + "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: + "The library that your query engine will use for reading and\ + \ writing data in your lake." 
+ type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + destinationType: + title: "s3-glue" + const: "s3-glue" + enum: + - "s3-glue" + order: 0 + type: "string" + destination-s3-glue-update: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 4 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "Root level flattening" + enum: + - "No flattening" + - "Root level flattening" + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: + "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: + "The library that your query engine will use for reading and\ + \ writing data in your lake." + type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + destination-dev-null: + title: "E2E Test Destination Spec" + type: "object" + required: + - "test_destination" + - "destinationType" + properties: + test_destination: + title: "Test Destination" + type: "object" + description: "The type of destination to be used" + oneOf: + - title: "Logging" + required: + - "test_destination_type" + - "logging_config" + properties: + test_destination_type: + type: "string" + const: "LOGGING" + default: "LOGGING" + enum: + - "LOGGING" + logging_config: + title: "Logging Configuration" + type: "object" + description: "Configurate how the messages are logged." + oneOf: + - title: "First N Entries" + description: "Log first N entries per stream." 
+ type: "object" + required: + - "logging_type" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "FirstN" + default: "FirstN" + max_entry_count: + title: "N" + description: + "Number of entries to log. This destination is\ + \ for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Every N-th Entry" + description: + "For each stream, log every N-th entry with a maximum\ + \ cap." + type: "object" + required: + - "logging_type" + - "nth_entry_to_log" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "EveryNth" + default: "EveryNth" + nth_entry_to_log: + title: "N" + description: + "The N-th entry to log for each stream. N starts\ + \ from 1. For example, when N = 1, every entry is logged;\ + \ when N = 2, every other entry is logged; when N = 3, one\ + \ out of three entries is logged." + type: "number" + example: + - 3 + minimum: 1 + maximum: 1000 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Random Sampling" + description: + "For each stream, randomly log a percentage of the\ + \ entries with a maximum cap." + type: "object" + required: + - "logging_type" + - "sampling_ratio" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "RandomSampling" + default: "RandomSampling" + sampling_ratio: + title: "Sampling Ratio" + description: "A positive floating number smaller than 1." 
+ type: "number" + default: 0.001 + examples: + - 0.001 + minimum: 0 + maximum: 1 + seed: + title: "Random Number Generator Seed" + description: + "When the seed is unspecified, the current time\ + \ millis will be used as the seed." + type: "number" + examples: + - 1900 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Silent" + required: + - "test_destination_type" + properties: + test_destination_type: + type: "string" + const: "SILENT" + default: "SILENT" + enum: + - "SILENT" + - title: "Throttled" + required: + - "test_destination_type" + - "millis_per_record" + properties: + test_destination_type: + type: "string" + const: "THROTTLED" + default: "THROTTLED" + enum: + - "THROTTLED" + millis_per_record: + description: "Number of milli-second to pause in between records." + type: "integer" + - title: "Failing" + required: + - "test_destination_type" + - "num_messages" + properties: + test_destination_type: + type: "string" + const: "FAILING" + default: "FAILING" + enum: + - "FAILING" + num_messages: + description: "Number of messages after which to fail." 
+ type: "integer" + destinationType: + title: "dev-null" + const: "dev-null" + enum: + - "dev-null" + order: 0 + type: "string" + destination-dev-null-update: + title: "E2E Test Destination Spec" + type: "object" + required: + - "test_destination" + properties: + test_destination: + title: "Test Destination" + type: "object" + description: "The type of destination to be used" + oneOf: + - title: "Logging" + required: + - "test_destination_type" + - "logging_config" + properties: + test_destination_type: + type: "string" + const: "LOGGING" + default: "LOGGING" + enum: + - "LOGGING" + logging_config: + title: "Logging Configuration" + type: "object" + description: "Configurate how the messages are logged." + oneOf: + - title: "First N Entries" + description: "Log first N entries per stream." + type: "object" + required: + - "logging_type" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "FirstN" + default: "FirstN" + max_entry_count: + title: "N" + description: + "Number of entries to log. This destination is\ + \ for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Every N-th Entry" + description: + "For each stream, log every N-th entry with a maximum\ + \ cap." + type: "object" + required: + - "logging_type" + - "nth_entry_to_log" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "EveryNth" + default: "EveryNth" + nth_entry_to_log: + title: "N" + description: + "The N-th entry to log for each stream. N starts\ + \ from 1. For example, when N = 1, every entry is logged;\ + \ when N = 2, every other entry is logged; when N = 3, one\ + \ out of three entries is logged." + type: "number" + example: + - 3 + minimum: 1 + maximum: 1000 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. 
So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Random Sampling" + description: + "For each stream, randomly log a percentage of the\ + \ entries with a maximum cap." + type: "object" + required: + - "logging_type" + - "sampling_ratio" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "RandomSampling" + default: "RandomSampling" + sampling_ratio: + title: "Sampling Ratio" + description: "A positive floating number smaller than 1." + type: "number" + default: 0.001 + examples: + - 0.001 + minimum: 0 + maximum: 1 + seed: + title: "Random Number Generator Seed" + description: + "When the seed is unspecified, the current time\ + \ millis will be used as the seed." + type: "number" + examples: + - 1900 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Silent" + required: + - "test_destination_type" + properties: + test_destination_type: + type: "string" + const: "SILENT" + default: "SILENT" + enum: + - "SILENT" + - title: "Throttled" + required: + - "test_destination_type" + - "millis_per_record" + properties: + test_destination_type: + type: "string" + const: "THROTTLED" + default: "THROTTLED" + enum: + - "THROTTLED" + millis_per_record: + description: "Number of milli-second to pause in between records." + type: "integer" + - title: "Failing" + required: + - "test_destination_type" + - "num_messages" + properties: + test_destination_type: + type: "string" + const: "FAILING" + default: "FAILING" + enum: + - "FAILING" + num_messages: + description: "Number of messages after which to fail." 
+ type: "integer" + destination-snowflake-cortex: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Snowflake Connection" + type: "object" + properties: + host: + title: "Host" + description: + "Enter the account name you want to use to access the database.\ + \ This is usually the identifier before .snowflakecomputing.com" + order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + role: + title: "Role" + description: "Enter the role that you want to use to access Snowflake" + order: 2 + examples: + - "AIRBYTE_ROLE" + - "ACCOUNTADMIN" + type: "string" + warehouse: + title: "Warehouse" + description: + "Enter the name of the warehouse that you want to use as\ + \ a compute cluster" + order: 3 + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the 
name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "default_schema" + - "username" + - "credentials" + description: "Snowflake can be used to store vector data and retrieve embeddings." + group: "indexing" + destinationType: + title: "snowflake-cortex" + const: "snowflake-cortex" + enum: + - "snowflake-cortex" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-snowflake-cortex-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. 
You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. 
The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." 
+ type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." + default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." 
+ required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." 
+ default: false + group: "advanced" + type: "boolean" + indexing: + title: "Snowflake Connection" + type: "object" + properties: + host: + title: "Host" + description: + "Enter the account name you want to use to access the database.\ + \ This is usually the identifier before .snowflakecomputing.com" + order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + role: + title: "Role" + description: "Enter the role that you want to use to access Snowflake" + order: 2 + examples: + - "AIRBYTE_ROLE" + - "ACCOUNTADMIN" + type: "string" + warehouse: + title: "Warehouse" + description: + "Enter the name of the warehouse that you want to use as\ + \ a compute cluster" + order: 3 + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + required: + - "password" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "default_schema" + - "username" + - "credentials" + description: "Snowflake can be used to store vector data and retrieve embeddings." 
+ group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-timeplus: + title: "Destination Timeplus" + type: "object" + required: + - "endpoint" + - "apikey" + - "destinationType" + properties: + endpoint: + title: "Endpoint" + description: "Timeplus workspace endpoint" + type: "string" + default: "https://us-west-2.timeplus.cloud/" + examples: + - "https://us-west-2.timeplus.cloud/workspace_id" + order: 0 + apikey: + title: "API key" + description: "Personal API key" + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + destinationType: + title: "timeplus" + const: "timeplus" + enum: + - "timeplus" + order: 0 + type: "string" + destination-timeplus-update: + title: "Destination Timeplus" + type: "object" + required: + - "endpoint" + - "apikey" + properties: + endpoint: + title: "Endpoint" + description: "Timeplus workspace endpoint" + type: "string" + default: "https://us-west-2.timeplus.cloud/" + examples: + - "https://us-west-2.timeplus.cloud/workspace_id" + order: 0 + apikey: + title: "API key" + description: "Personal API key" + type: "string" + airbyte_secret: true + order: 1 + destination-convex: + title: "Destination Convex" + type: "object" + required: + - "deployment_url" + - "access_key" + - "destinationType" + properties: + deployment_url: + type: "string" + description: "URL of the Convex deployment that is the destination" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + description: "API access key used to send data to a Convex deployment." 
+ airbyte_secret: "true" + x-speakeasy-param-sensitive: true + destinationType: + title: "convex" + const: "convex" + enum: + - "convex" + order: 0 + type: "string" + destination-convex-update: + title: "Destination Convex" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + description: "URL of the Convex deployment that is the destination" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + description: "API access key used to send data to a Convex deployment." + airbyte_secret: "true" + destination-firestore: + title: "Destination Google Firestore" + type: "object" + required: + - "project_id" + - "destinationType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + destinationType: + title: "firestore" + const: "firestore" + enum: + - "firestore" + order: 0 + type: "string" + destination-firestore-update: + title: "Destination Google Firestore" + type: "object" + required: + - "project_id" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." 
+ title: "Credentials JSON" + airbyte_secret: true + destination-redshift: + title: "Redshift Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" + - "destinationType" + properties: + host: + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)" + type: "string" + title: "Host" + group: "connection" + order: 1 + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + title: "Port" + group: "connection" + order: 2 + username: + description: "Username to use to access the database." + type: "string" + title: "Username" + group: "connection" + order: 3 + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + group: "connection" + order: 4 + x-speakeasy-param-sensitive: true + database: + description: "Name of the database." + type: "string" + title: "Database" + group: "connection" + order: 5 + schema: + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. Unless specifically configured, the usual value\ + \ for this field is \"public\"." + type: "string" + examples: + - "public" + default: "public" + group: "connection" + title: "Default Schema" + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + group: "connection" + order: 7 + uploading_method: + title: "Uploading Method" + type: "object" + description: "The way data will be uploaded to Redshift." 
+ group: "connection" + order: 8 + display_type: "radio" + oneOf: + - title: "AWS S3 Staging" + description: + "(recommended) Uploads data to S3 and then uses a\ + \ COPY to insert the data into Redshift. COPY is recommended for production\ + \ workloads for better speed and scalability. See AWS docs for more details." + required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + enum: + - "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket." + examples: + - "airbyte.staging" + order: 0 + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: + "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 2 + access_key_id: + type: "string" + description: + "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." 
+ title: "S3 Access Key Id" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Secret Access Key" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for\ + \ the S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: + "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)." + title: "Destinations V2 Raw Table Schema" + order: 9 + group: "tables" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + group: "tables" + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! 
Risk of unrecoverable data loss)" + order: 12 + group: "tables" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "redshift" + const: "redshift" + enum: + - "redshift" + order: 0 + type: "string" + groups: + - id: "connection" + title: "Connection" + - id: "tables" + title: "Tables" + destination-redshift-update: + title: "Redshift Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" + properties: + host: + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)" + type: "string" + 
title: "Host" + group: "connection" + order: 1 + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + title: "Port" + group: "connection" + order: 2 + username: + description: "Username to use to access the database." + type: "string" + title: "Username" + group: "connection" + order: 3 + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + group: "connection" + order: 4 + database: + description: "Name of the database." + type: "string" + title: "Database" + group: "connection" + order: 5 + schema: + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. Unless specifically configured, the usual value\ + \ for this field is \"public\"." + type: "string" + examples: + - "public" + default: "public" + group: "connection" + title: "Default Schema" + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + group: "connection" + order: 7 + uploading_method: + title: "Uploading Method" + type: "object" + description: "The way data will be uploaded to Redshift." + group: "connection" + order: 8 + display_type: "radio" + oneOf: + - title: "AWS S3 Staging" + description: + "(recommended) Uploads data to S3 and then uses a\ + \ COPY to insert the data into Redshift. COPY is recommended for production\ + \ workloads for better speed and scalability. See AWS docs for more details." 
+ required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + enum: + - "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket." + examples: + - "airbyte.staging" + order: 0 + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: + "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 2 + access_key_id: + type: "string" + description: + "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Access Key Id" + airbyte_secret: true + order: 3 + secret_access_key: + type: "string" + description: + "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." 
+ title: "S3 Secret Access Key" + airbyte_secret: true + order: 4 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for\ + \ the S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: + "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)." + title: "Destinations V2 Raw Table Schema" + order: 9 + group: "tables" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + group: "tables" + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 12 + group: "tables" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + groups: + - id: "connection" + title: "Connection" + - id: "tables" + title: "Tables" + destination-dynamodb: + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name_prefix" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + - "destinationType" + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name_prefix: + title: "Table name prefix" + type: "string" + description: "The prefix to use when naming DynamoDB tables." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + access_key_id: + type: "string" + description: + "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." + title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + x-speakeasy-param-sensitive: true + destinationType: + title: "dynamodb" + const: "dynamodb" + enum: + - "dynamodb" + order: 0 + type: "string" + destination-dynamodb-update: + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name_prefix" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name_prefix: + title: "Table name prefix" + type: "string" + description: "The prefix to use when naming DynamoDB tables." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + access_key_id: + type: "string" + description: + "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." + title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + destination-qdrant: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. 
You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." 
+ processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context of your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. 
When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." + default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." 
+ required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." 
+ default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + url: + title: "Public Endpoint" + description: "Public Endpoint of the Qdrant cluster" + order: 0 + type: "string" + auth_method: + title: "Authentication Method" + description: "Method to authenticate with the Qdrant Instance" + default: "api_key_auth" + type: "object" + order: 1 + oneOf: + - title: "ApiKeyAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "api_key_auth" + const: "api_key_auth" + enum: + - "api_key_auth" + type: "string" + api_key: + title: "API Key" + description: "API Key for the Qdrant instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "api_key" + - title: "NoAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + prefer_grpc: + title: "Prefer gRPC" + description: + "Whether to prefer gRPC over HTTP. Set to true for Qdrant\ + \ cloud clusters" + default: true + type: "boolean" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 2 + type: "string" + distance_metric: + title: "Distance Metric" + description: + "The Distance metric used to measure similarities among\ + \ vectors. This field is only used if the collection defined in the\ + \ does not exist yet and is created automatically by the connector."
+ default: "cos" + enum: + - "dot" + - "cos" + - "euc" + type: "string" + text_field: + title: "Text Field" + description: "The field in the payload that contains the embedded text" + default: "text" + type: "string" + required: + - "url" + - "collection" + group: "Indexing" + description: "Indexing configuration" + destinationType: + title: "qdrant" + const: "qdrant" + enum: + - "qdrant" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-qdrant-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + url: + title: "Public Endpoint" + description: "Public Endpoint of the Qdrant cluser" + order: 0 + type: "string" + auth_method: + title: "Authentication Method" + description: "Method to authenticate with the Qdrant Instance" + default: "api_key_auth" + type: "object" + order: 1 + oneOf: + - title: "ApiKeyAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "api_key_auth" + const: "api_key_auth" + enum: + - "api_key_auth" + type: "string" + api_key: + title: "API Key" + description: "API Key for the Qdrant instance" + airbyte_secret: true + type: "string" + required: + - "api_key" + - title: "NoAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + prefer_grpc: + title: "Prefer gRPC" + description: + "Whether to prefer gRPC over HTTP. 
Set to true for Qdrant\ + \ cloud clusters" + default: true + type: "boolean" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 2 + type: "string" + distance_metric: + title: "Distance Metric" + description: + "The Distance metric used to measure similarities among\ + \ vectors. This field is only used if the collection defined in the\ + \ does not exist yet and is created automatically by the connector." + default: "cos" + enum: + - "dot" + - "cos" + - "euc" + type: "string" + text_field: + title: "Text Field" + description: "The field in the payload that contains the embedded text" + default: "text" + type: "string" + required: + - "url" + - "collection" + group: "Indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-snowflake: + title: "Snowflake Destination Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + - "destinationType" + properties: + host: + description: + "Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)" + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" + type: "string" + title: "Host" + pattern: + "^(http(s)?:\\/\\/)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\\ + .(snowflakecomputing\\.com|localstack\\.cloud))$" + pattern_descriptor: "{account_name}.snowflakecomputing.com or {accountname}.{aws_location}.aws.snowflakecomputing.com" + order: 0 + role: + description: + "Enter the role that you want to use to access Snowflake" + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 1 + warehouse: + description: + "Enter the name of the warehouse that you want to use as a compute 
cluster" + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 2 + database: + description: + "Enter the name of the database you want to sync data into" + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 3 + schema: + description: + "Enter the name of the default schema" + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Default Schema" + order: 4 + username: + description: "Enter the name of the user you want to use to access the database" + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 5 + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Key Pair Authentication" + type: "object" + order: 0 + required: + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + enum: + - "Key Pair Authentication" + default: "Key Pair Authentication" + order: 0 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Username and Password" + type: "object" + required: + - "password" + order: 1 + properties: + auth_type: + type: "string" + const: "Username and Password" + enum: + - "Username and Password" + default: "Username and Password" + order: 0 + password: + description: "Enter the password associated with the username." 
+ type: "string" + airbyte_secret: true + title: "Password" + order: 1 + x-speakeasy-param-sensitive: true + - title: "OAuth2.0" + type: "object" + order: 2 + required: + - "access_token" + - "refresh_token" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + default: "OAuth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "Enter your application's Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "Enter your application's Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Enter you application's Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Enter your application's Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 6 + jdbc_url_params: + description: + "Enter the additional properties to pass to the JDBC URL string\ + \ when connecting to the database (formatted as key=value pairs separated\ + \ by the symbol &). Example: key1=value1&key2=value2&key3=value3" + title: "JDBC URL Params" + type: "string" + order: 7 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 10 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! 
Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + retention_period_days: + type: "integer" + default: 1 + description: + "The number of days of Snowflake Time Travel to enable on the\ + \ tables. See Snowflake's documentation for more information. Setting a nonzero\ + \ value will incur increased storage costs in your Snowflake instance." + title: "Data Retention Period (days)" + order: 13 + use_merge_for_upsert: + type: "boolean" + default: false + description: + "Use MERGE for de-duplication of final tables. This option\ + \ no effect if Final tables are disabled or Sync mode is not DEDUPE" + title: "Use MERGE for De-duplication of final tables" + order: 14 + destinationType: + title: "snowflake" + const: "snowflake" + enum: + - "snowflake" + order: 0 + type: "string" + destination-snowflake-update: + title: "Snowflake Destination Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + properties: + host: + description: + "Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)" + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" + type: "string" + title: "Host" + pattern: + "^(http(s)?:\\/\\/)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\\ + .(snowflakecomputing\\.com|localstack\\.cloud))$" + pattern_descriptor: "{account_name}.snowflakecomputing.com or {accountname}.{aws_location}.aws.snowflakecomputing.com" + order: 0 + role: + description: + "Enter the role that you want to use to access Snowflake" + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 1 + warehouse: + description: + "Enter the name of the warehouse that you want to use as a compute cluster" + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 2 + database: + description: + "Enter the name of the database you want to sync data into" + examples: + - 
"AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 3 + schema: + description: + "Enter the name of the default schema" + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Default Schema" + order: 4 + username: + description: "Enter the name of the user you want to use to access the database" + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 5 + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Key Pair Authentication" + type: "object" + order: 0 + required: + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + enum: + - "Key Pair Authentication" + default: "Key Pair Authentication" + order: 0 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + - title: "Username and Password" + type: "object" + required: + - "password" + order: 1 + properties: + auth_type: + type: "string" + const: "Username and Password" + enum: + - "Username and Password" + default: "Username and Password" + order: 0 + password: + description: "Enter the password associated with the username." 
+ type: "string" + airbyte_secret: true + title: "Password" + order: 1 + - title: "OAuth2.0" + type: "object" + order: 2 + required: + - "access_token" + - "refresh_token" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + default: "OAuth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "Enter your application's Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Enter your application's Client secret" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Enter you application's Access Token" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Enter your application's Refresh Token" + airbyte_secret: true + order: 6 + jdbc_url_params: + description: + "Enter the additional properties to pass to the JDBC URL string\ + \ when connecting to the database (formatted as key=value pairs separated\ + \ by the symbol &). Example: key1=value1&key2=value2&key3=value3" + title: "JDBC URL Params" + type: "string" + order: 7 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 10 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + retention_period_days: + type: "integer" + default: 1 + description: + "The number of days of Snowflake Time Travel to enable on the\ + \ tables. See Snowflake's documentation for more information. 
Setting a nonzero\ + \ value will incur increased storage costs in your Snowflake instance." + title: "Data Retention Period (days)" + order: 13 + use_merge_for_upsert: + type: "boolean" + default: false + description: + "Use MERGE for de-duplication of final tables. This option\ + \ no effect if Final tables are disabled or Sync mode is not DEDUPE" + title: "Use MERGE for De-duplication of final tables" + order: 14 + destination-databricks: + title: "Databricks Lakehouse Destination Spec" + type: "object" + required: + - "accept_terms" + - "hostname" + - "http_path" + - "database" + - "authentication" + - "destinationType" + properties: + accept_terms: + title: "Agree to the Databricks JDBC Driver Terms & Conditions" + type: "boolean" + description: + "You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector." + default: false + order: 1 + hostname: + title: "Server Hostname" + type: "string" + description: "Databricks Cluster Server Hostname." + examples: + - "abc-12345678-wxyz.cloud.databricks.com" + order: 2 + http_path: + title: "HTTP Path" + type: "string" + description: "Databricks Cluster HTTP Path." + examples: + - "sql/1.0/warehouses/0000-1111111-abcd90" + order: 3 + port: + title: "Port" + type: "string" + description: "Databricks Cluster Port." + default: "443" + examples: + - "443" + order: 4 + database: + title: "Databricks Unity Catalog Name" + description: "The name of the unity catalog for the database" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema tables are written. If not specified otherwise,\ + \ the \"default\" will be used." 
+ type: "string" + examples: + - "default" + default: "default" + order: 6 + authentication: + title: "Authentication" + type: "object" + description: "Authentication mechanism for Staging files and running queries" + default: "OAUTH" + order: 8 + oneOf: + - title: "OAuth2 (Recommended)" + required: + - "auth_type" + - "client_id" + - "secret" + properties: + auth_type: + type: "string" + const: "OAUTH" + order: 0 + enum: + - "OAUTH" + client_id: + type: "string" + order: 1 + secret: + type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + - title: "Personal Access Token" + required: + - "auth_type" + - "personal_access_token" + properties: + auth_type: + type: "string" + const: "BASIC" + order: 0 + enum: + - "BASIC" + personal_access_token: + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Default to 'true'. Switch it to 'false' for debugging purpose." + default: true + order: 9 + raw_schema_override: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + default: "airbyte_internal" + order: 10 + destinationType: + title: "databricks" + const: "databricks" + enum: + - "databricks" + order: 0 + type: "string" + destination-databricks-update: + title: "Databricks Lakehouse Destination Spec" + type: "object" + required: + - "accept_terms" + - "hostname" + - "http_path" + - "database" + - "authentication" + properties: + accept_terms: + title: "Agree to the Databricks JDBC Driver Terms & Conditions" + type: "boolean" + description: + "You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector." + default: false + order: 1 + hostname: + title: "Server Hostname" + type: "string" + description: "Databricks Cluster Server Hostname." 
+ examples: + - "abc-12345678-wxyz.cloud.databricks.com" + order: 2 + http_path: + title: "HTTP Path" + type: "string" + description: "Databricks Cluster HTTP Path." + examples: + - "sql/1.0/warehouses/0000-1111111-abcd90" + order: 3 + port: + title: "Port" + type: "string" + description: "Databricks Cluster Port." + default: "443" + examples: + - "443" + order: 4 + database: + title: "Databricks Unity Catalog Name" + description: "The name of the unity catalog for the database" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema tables are written. If not specified otherwise,\ + \ the \"default\" will be used." + type: "string" + examples: + - "default" + default: "default" + order: 6 + authentication: + title: "Authentication" + type: "object" + description: "Authentication mechanism for Staging files and running queries" + default: "OAUTH" + order: 8 + oneOf: + - title: "OAuth2 (Recommended)" + required: + - "auth_type" + - "client_id" + - "secret" + properties: + auth_type: + type: "string" + const: "OAUTH" + order: 0 + enum: + - "OAUTH" + client_id: + type: "string" + order: 1 + secret: + type: "string" + airbyte_secret: true + order: 2 + - title: "Personal Access Token" + required: + - "auth_type" + - "personal_access_token" + properties: + auth_type: + type: "string" + const: "BASIC" + order: 0 + enum: + - "BASIC" + personal_access_token: + type: "string" + airbyte_secret: true + order: 1 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Default to 'true'. Switch it to 'false' for debugging purpose." 
+ default: true + order: 9 + raw_schema_override: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + default: "airbyte_internal" + order: 10 + destination-oracle: + title: "Oracle Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sid" + - "destinationType" + properties: + host: + title: "Host" + description: "The hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + examples: + - "1521" + order: 1 + sid: + title: "SID" + description: + "The System Identifier uniquely distinguishes the instance\ + \ from any other instance on the same computer." + type: "string" + order: 2 + username: + title: "User" + description: + "The username to access the database. This user must have CREATE\ + \ USER privileges in the database." + type: "string" + order: 3 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema is used as the target schema for all statements\ + \ issued from the connection that do not explicitly specify a schema name.\ + \ The usual value for this field is \"airbyte\". In Oracle, schemas and\ + \ users are the same thing, so the \"user\" parameter is used as the login\ + \ credentials and this is used for the default Airbyte message schema." 
+ type: "string" + examples: + - "airbyte" + default: "airbyte" + order: 6 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the database encryption algorithm." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." 
+ type: "string" + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "oracle" + const: "oracle" + enum: + - "oracle" + order: 0 + type: "string" + destination-oracle-update: + title: "Oracle Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sid" + properties: + host: + title: "Host" + description: "The hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + examples: + - "1521" + order: 1 + sid: + title: "SID" + description: + "The System Identifier uniquely distinguishes the instance\ + \ from any other instance on the same computer." + type: "string" + order: 2 + username: + title: "User" + description: + "The username to access the database. This user must have CREATE\ + \ USER privileges in the database." + type: "string" + order: 3 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema is used as the target schema for all statements\ + \ issued from the connection that do not explicitly specify a schema name.\ + \ The usual value for this field is \"airbyte\". In Oracle, schemas and\ + \ users are the same thing, so the \"user\" parameter is used as the login\ + \ credentials and this is used for the default Airbyte message schema." + type: "string" + examples: + - "airbyte" + default: "airbyte" + order: 6 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the database encryption algorithm." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-aws-datalake: + title: "AWS Datalake Destination Spec" + type: "object" + required: + - "credentials" + - "region" + - "bucket_name" + - "lakeformation_database_name" + - "destinationType" + properties: + aws_account_id: + type: "string" + title: "AWS Account Id" + description: "target aws account id" + examples: + - "111111111111" + order: 1 + credentials: + title: "Authentication mode" + description: "Choose How to Authenticate to AWS." 
+ type: "object" + oneOf: + - type: "object" + title: "IAM Role" + required: + - "role_arn" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM Role" + enum: + - "IAM Role" + default: "IAM Role" + order: 0 + role_arn: + title: "Target Role Arn" + type: "string" + description: "Will assume this role to write data to s3" + airbyte_secret: false + x-speakeasy-param-sensitive: true + - type: "object" + title: "IAM User" + required: + - "credentials_title" + - "aws_access_key_id" + - "aws_secret_access_key" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM User" + enum: + - "IAM User" + default: "IAM User" + order: 0 + aws_access_key_id: + title: "Access Key Id" + type: "string" + description: "AWS User Access Key Id" + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_secret_access_key: + title: "Secret Access Key" + type: "string" + description: "Secret Access Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 2 + region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + order: 4 + bucket_prefix: + title: "Target S3 Bucket Prefix" + type: "string" + description: "S3 prefix" + order: 5 + lakeformation_database_name: + title: "Lake Formation Database Name" + type: "string" + description: + "The default database this destination will use to create tables\ + \ in per stream. Can be changed per connection by customizing the namespace." + order: 6 + lakeformation_database_default_tag_key: + title: "Lake Formation Database Tag Key" + description: "Add a default tag key to databases created by this destination" + examples: + - "pii_level" + type: "string" + order: 7 + lakeformation_database_default_tag_values: + title: "Lake Formation Database Tag Values" + description: + "Add default values for the `Tag Key` to databases created\ + \ by this destination. Comma separate for multiple values." + examples: + - "private,public" + type: "string" + order: 8 + lakeformation_governed_tables: + title: "Lake Formation Governed Tables" + description: "Whether to create tables as LF governed tables." + type: "boolean" + default: false + order: 9 + format: + title: "Output Format *" + type: "object" + description: "Format of the data output." 
+ oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "GZIP" + default: "UNCOMPRESSED" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "ZSTD" + default: "SNAPPY" + order: 10 + partitioning: + title: "Choose how to partition data" + description: "Partition data by cursor fields when a cursor field is a date" + type: "string" + enum: + - "NO PARTITIONING" + - "DATE" + - "YEAR" + - "MONTH" + - "DAY" + - "YEAR/MONTH" + - "YEAR/MONTH/DAY" + default: "NO PARTITIONING" + order: 11 + glue_catalog_float_as_decimal: + title: "Glue Catalog: Float as Decimal" + description: + "Cast float/double as decimal(38,18). This can help achieve\ + \ higher accuracy and represent numbers correctly as received from the\ + \ source." + type: "boolean" + default: false + order: 12 + destinationType: + title: "aws-datalake" + const: "aws-datalake" + enum: + - "aws-datalake" + order: 0 + type: "string" + destination-aws-datalake-update: + title: "AWS Datalake Destination Spec" + type: "object" + required: + - "credentials" + - "region" + - "bucket_name" + - "lakeformation_database_name" + properties: + aws_account_id: + type: "string" + title: "AWS Account Id" + description: "target aws account id" + examples: + - "111111111111" + order: 1 + credentials: + title: "Authentication mode" + description: "Choose How to Authenticate to AWS." 
+ type: "object" + oneOf: + - type: "object" + title: "IAM Role" + required: + - "role_arn" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM Role" + enum: + - "IAM Role" + default: "IAM Role" + order: 0 + role_arn: + title: "Target Role Arn" + type: "string" + description: "Will assume this role to write data to s3" + airbyte_secret: false + - type: "object" + title: "IAM User" + required: + - "credentials_title" + - "aws_access_key_id" + - "aws_secret_access_key" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM User" + enum: + - "IAM User" + default: "IAM User" + order: 0 + aws_access_key_id: + title: "Access Key Id" + type: "string" + description: "AWS User Access Key Id" + airbyte_secret: true + aws_secret_access_key: + title: "Secret Access Key" + type: "string" + description: "Secret Access Key" + airbyte_secret: true + order: 2 + region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." 
+ order: 4 + bucket_prefix: + title: "Target S3 Bucket Prefix" + type: "string" + description: "S3 prefix" + order: 5 + lakeformation_database_name: + title: "Lake Formation Database Name" + type: "string" + description: + "The default database this destination will use to create tables\ + \ in per stream. Can be changed per connection by customizing the namespace." + order: 6 + lakeformation_database_default_tag_key: + title: "Lake Formation Database Tag Key" + description: "Add a default tag key to databases created by this destination" + examples: + - "pii_level" + type: "string" + order: 7 + lakeformation_database_default_tag_values: + title: "Lake Formation Database Tag Values" + description: + "Add default values for the `Tag Key` to databases created\ + \ by this destination. Comma separate for multiple values." + examples: + - "private,public" + type: "string" + order: 8 + lakeformation_governed_tables: + title: "Lake Formation Governed Tables" + description: "Whether to create tables as LF governed tables." + type: "boolean" + default: false + order: 9 + format: + title: "Output Format *" + type: "object" + description: "Format of the data output." + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "GZIP" + default: "UNCOMPRESSED" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "ZSTD" + default: "SNAPPY" + order: 10 + partitioning: + title: "Choose how to partition data" + description: "Partition data by cursor fields when a cursor field is a date" + type: "string" + enum: + - "NO PARTITIONING" + - "DATE" + - "YEAR" + - "MONTH" + - "DAY" + - "YEAR/MONTH" + - "YEAR/MONTH/DAY" + default: "NO PARTITIONING" + order: 11 + glue_catalog_float_as_decimal: + title: "Glue Catalog: Float as Decimal" + description: + "Cast float/double as decimal(38,18). This can help achieve\ + \ higher accuracy and represent numbers correctly as received from the\ + \ source." + type: "boolean" + default: false + order: 12 + destination-milvus: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context of your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: 
"vector" + type: "string" + text_field: + title: "Text Field" + description: "The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "milvus" + const: "milvus" + enum: + - "milvus" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-milvus-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: "vector" + type: "string" + text_field: + title: "Text Field" + description: 
"The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-firebolt: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + - "destinationType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + order: 0 + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." 
+ loading_method: + type: "object" + title: "Loading Method" + description: + "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + enum: + - "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + enum: + - "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." + examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + x-speakeasy-param-sensitive: true + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + x-speakeasy-param-sensitive: true + destinationType: + title: "firebolt" + const: "firebolt" + enum: + - "firebolt" + order: 0 + type: "string" + destination-firebolt-update: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + order: 0 + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + order: 1 + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." 
+ examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + loading_method: + type: "object" + title: "Loading Method" + description: + "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + enum: + - "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + enum: + - "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." + examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + destination-google-sheets: + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + - "destinationType" + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "The link to your spreadsheet. See this\ + \ guide for more details." 
+ examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "Authentication via Google (OAuth)" + description: + "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + destinationType: + title: "google-sheets" + const: "google-sheets" + enum: + - "google-sheets" + order: 0 + type: "string" + destination-google-sheets-update: + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "The link to your spreadsheet. See this\ + \ guide for more details." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "Authentication via Google (OAuth)" + description: + "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." 
+ airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + destination-astra: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + astra_db_app_token: + title: "Astra DB Application Token" + description: + "The application token authorizes a user to connect to\ + \ a specific Astra DB database. It is created when the user clicks\ + \ the Generate Token button on the Overview tab of the Database page\ + \ in the Astra UI." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + astra_db_endpoint: + title: "Astra DB Endpoint" + description: + "The endpoint specifies which Astra DB database queries\ + \ are sent to. It can be copied from the Database Details section\ + \ of the Overview tab of the Database page in the Astra UI." 
+ pattern: + "^https:\\/\\/([a-z]|[0-9]){8}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){12}-[^\\\ + .]*?\\.apps\\.astra\\.datastax\\.com" + examples: + - "https://8292d414-dd1b-4c33-8431-e838bedc04f7-us-east1.apps.astra.datastax.com" + type: "string" + astra_db_keyspace: + title: "Astra DB Keyspace" + description: + "Keyspaces (or Namespaces) serve as containers for organizing\ + \ data within a database. You can create a new keyspace uisng the\ + \ Data Explorer tab in the Astra UI. The keyspace default_keyspace\ + \ is created for you when you create a Vector Database in Astra DB." + type: "string" + collection: + title: "Astra DB collection" + description: + "Collections hold data. They are analagous to tables in\ + \ traditional Cassandra terminology. This tool will create the collection\ + \ with the provided name automatically if it does not already exist.\ + \ Alternatively, you can create one thorugh the Data Explorer tab\ + \ in the Astra UI." + type: "string" + required: + - "astra_db_app_token" + - "astra_db_endpoint" + - "astra_db_keyspace" + - "collection" + description: + "Astra DB gives developers the APIs, real-time data and ecosystem\ + \ integrations to put accurate RAG and Gen AI apps with fewer hallucinations\ + \ in production." 
+ group: "indexing" + destinationType: + title: "astra" + const: "astra" + enum: + - "astra" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-astra-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + astra_db_app_token: + title: "Astra DB Application Token" + description: + "The application token authorizes a user to connect to\ + \ a specific Astra DB database. It is created when the user clicks\ + \ the Generate Token button on the Overview tab of the Database page\ + \ in the Astra UI." + airbyte_secret: true + type: "string" + astra_db_endpoint: + title: "Astra DB Endpoint" + description: + "The endpoint specifies which Astra DB database queries\ + \ are sent to. It can be copied from the Database Details section\ + \ of the Overview tab of the Database page in the Astra UI." 
+ pattern: + "^https:\\/\\/([a-z]|[0-9]){8}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){12}-[^\\\ + .]*?\\.apps\\.astra\\.datastax\\.com" + examples: + - "https://8292d414-dd1b-4c33-8431-e838bedc04f7-us-east1.apps.astra.datastax.com" + type: "string" + astra_db_keyspace: + title: "Astra DB Keyspace" + description: + "Keyspaces (or Namespaces) serve as containers for organizing\ + \ data within a database. You can create a new keyspace uisng the\ + \ Data Explorer tab in the Astra UI. The keyspace default_keyspace\ + \ is created for you when you create a Vector Database in Astra DB." + type: "string" + collection: + title: "Astra DB collection" + description: + "Collections hold data. They are analagous to tables in\ + \ traditional Cassandra terminology. This tool will create the collection\ + \ with the provided name automatically if it does not already exist.\ + \ Alternatively, you can create one thorugh the Data Explorer tab\ + \ in the Astra UI." + type: "string" + required: + - "astra_db_app_token" + - "astra_db_endpoint" + - "astra_db_keyspace" + - "collection" + description: + "Astra DB gives developers the APIs, real-time data and ecosystem\ + \ integrations to put accurate RAG and Gen AI apps with fewer hallucinations\ + \ in production." + group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-teradata: + title: "Teradata Destination Spec" + type: "object" + required: + - "host" + - "username" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "airbyte_td" + default: "airbyte_td" + order: 3 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 5 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the destination database\n prefer - Chose this\ + \ mode to allow unencrypted connection only if the destination database\ + \ does not support encryption\n require - Chose this mode to always\ + \ require encryption. If the destination database server does not support\ + \ encryption, connection will fail\n verify-ca - Chose this mode\ + \ to always require encryption and to verify that the destination database\ + \ server has a valid SSL certificate\n verify-full - This is the\ + \ most secure mode. Chose this mode to always require encryption and to\ + \ verify the identity of the destination database server\n See more information\ + \ - in the docs." + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-ca.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-full.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 7 + destinationType: + title: "teradata" + const: "teradata" + enum: + - "teradata" + order: 0 + type: "string" + destination-teradata-update: + title: "Teradata Destination Spec" + type: "object" + required: + - "host" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "airbyte_td" + default: "airbyte_td" + order: 3 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 5 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the destination database\n prefer - Chose this\ + \ mode to allow unencrypted connection only if the destination database\ + \ does not support encryption\n require - Chose this mode to always\ + \ require encryption. If the destination database server does not support\ + \ encryption, connection will fail\n verify-ca - Chose this mode\ + \ to always require encryption and to verify that the destination database\ + \ server has a valid SSL certificate\n verify-full - This is the\ + \ most secure mode. 
Chose this mode to always require encryption and to\ + \ verify the identity of the destination database server\n See more information\ + \ - in the docs." + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-ca.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-full.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 7 + destination-pinecone: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. 
This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + description: + "The Pinecone API key to use matching the environment (copy\ + \ from Pinecone console)" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + pinecone_environment: + title: "Pinecone Environment" + description: "Pinecone Cloud environment to use" + examples: + - "us-west1-gcp" + - "gcp-starter" + type: "string" + index: + title: "Index" + description: "Pinecone index in your project to load data into" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: + "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + destinationType: + title: "pinecone" + const: "pinecone" + enum: + - "pinecone" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-pinecone-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + description: + "The Pinecone API key to use matching the environment (copy\ + \ from Pinecone console)" + airbyte_secret: true + type: "string" + pinecone_environment: + title: "Pinecone Environment" + description: "Pinecone Cloud environment to use" + examples: + - "us-west1-gcp" + - "gcp-starter" + type: "string" + index: + title: "Index" + description: "Pinecone index in your project to load data into" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: + "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-duckdb: + title: "Destination Duckdb" + type: "object" + required: + - "destination_path" + - "destinationType" + properties: + motherduck_api_key: + title: "MotherDuck API Key" + type: "string" + description: "API key to use for authentication to a MotherDuck database." + airbyte_secret: true + x-speakeasy-param-sensitive: true + destination_path: + title: "Destination DB" + type: "string" + description: + "Path to the .duckdb file, or the text 'md:' to connect to\ + \ MotherDuck. The file will be placed inside that local mount. For more\ + \ information check out our docs" + examples: + - "/local/destination.duckdb" + - "md:" + - "motherduck:" + schema: + title: "Destination Schema" + type: "string" + description: "Database schema name, default for duckdb is 'main'." + example: "main" + destinationType: + title: "duckdb" + const: "duckdb" + enum: + - "duckdb" + order: 0 + type: "string" + destination-duckdb-update: + title: "Destination Duckdb" + type: "object" + required: + - "destination_path" + properties: + motherduck_api_key: + title: "MotherDuck API Key" + type: "string" + description: "API key to use for authentication to a MotherDuck database." + airbyte_secret: true + destination_path: + title: "Destination DB" + type: "string" + description: + "Path to the .duckdb file, or the text 'md:' to connect to\ + \ MotherDuck. The file will be placed inside that local mount. For more\ + \ information check out our docs" + examples: + - "/local/destination.duckdb" + - "md:" + - "motherduck:" + schema: + title: "Destination Schema" + type: "string" + description: "Database schema name, default for duckdb is 'main'." 
+ example: "main" + destination-iceberg: + title: "Iceberg Destination Spec" + type: "object" + required: + - "catalog_config" + - "storage_config" + - "format_config" + - "destinationType" + properties: + catalog_config: + title: "Iceberg catalog config" + type: "object" + description: "Catalog config of Iceberg." + oneOf: + - title: "HiveCatalog: Use Apache Hive MetaStore" + required: + - "catalog_type" + - "hive_thrift_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hive" + enum: + - "Hive" + order: 0 + hive_thrift_uri: + title: "Hive Metastore thrift uri" + type: "string" + description: "Hive MetaStore thrift server uri of iceberg catalog." + examples: + - "host:port" + order: 1 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 2 + - title: + "HadoopCatalog: Use hierarchical file systems as same as storage\ + \ config" + description: + "A Hadoop catalog doesn’t need to connect to a Hive MetaStore,\ + \ but can only be used with HDFS or similar file systems that support\ + \ atomic rename." + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hadoop" + enum: + - "Hadoop" + order: 0 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 1 + - title: "JdbcCatalog: Use relational database" + description: + "Using a table in a relational database to manage Iceberg\ + \ tables through JDBC. Read more here. 
Supporting: PostgreSQL" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Jdbc" + enum: + - "Jdbc" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + jdbc_url: + title: "Jdbc url" + type: "string" + examples: + - "jdbc:postgresql://{host}:{port}/{database}" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please\ + \ select one of the connection modes." + type: "boolean" + default: false + order: 5 + catalog_schema: + title: "schema for Iceberg catalog" + description: + "Iceberg catalog metadata tables are written to catalog\ + \ schema. The usual value for this field is \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 6 + - title: "RESTCatalog" + description: + "The RESTCatalog connects to a REST server at the specified\ + \ URI" + required: + - "catalog_type" + - "rest_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Rest" + enum: + - "Rest" + order: 0 + rest_uri: + title: "REST Server URI" + type: "string" + examples: + - "http://localhost:12345" + order: 1 + rest_credential: + title: + "A credential to exchange for a token in the OAuth2 client\ + \ credentials flow." 
+ type: "string" + airbyte_secret: true + examples: + - "username:password" + order: 2 + x-speakeasy-param-sensitive: true + rest_token: + title: + "A Bearer token which will be used for interaction with the\ + \ server." + type: "string" + airbyte_secret: true + examples: + - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + order: 3 + x-speakeasy-param-sensitive: true + - title: "GlueCatalog" + description: "The GlueCatalog connects to a AWS Glue Catalog" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Glue" + enum: + - "Glue" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + order: 0 + storage_config: + title: "Storage config" + type: "object" + description: "Storage config of Iceberg." + oneOf: + - title: "S3" + type: "object" + description: "S3 object storage" + required: + - "storage_type" + - "access_key_id" + - "secret_access_key" + - "s3_warehouse_uri" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "S3" + enum: + - "S3" + order: 0 + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read\ + \ more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + s3_warehouse_uri: + title: "S3 Warehouse Uri for Iceberg" + type: "string" + description: "The Warehouse Uri for Iceberg" + examples: + - "s3a://my-bucket/path/to/warehouse" + - "s3://my-bucket/path/to/warehouse" + order: 2 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + - "localhost:9000" + order: 4 + s3_path_style_access: + type: "boolean" + description: "Use path style access" + examples: + - true + - false + default: true + order: 5 + - title: "Server-managed" + type: "object" + description: "Server-managed object storage" + required: + - "storage_type" + - "managed_warehouse_name" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "MANAGED" + enum: + - "MANAGED" + order: 0 + managed_warehouse_name: + type: "string" + description: "The name of the managed warehouse" + title: "Warehouse name" + order: 0 + order: 1 + format_config: + title: "File format" + type: "object" + required: + - "format" + description: "File format of Iceberg storage." + properties: + format: + title: "File storage format" + type: "string" + default: "Parquet" + description: "" + enum: + - "Parquet" + - "Avro" + order: 0 + flush_batch_size: + title: "Data file flushing batch size" + description: + "Iceberg data file flush batch size. Incoming rows write\ + \ to cache firstly; When cache size reaches this 'batch size', flush\ + \ into real Iceberg data file." + type: "integer" + default: 10000 + order: 1 + auto_compact: + title: "Auto compact data files" + description: "Auto compact data files when stream close" + type: "boolean" + default: false + order: 2 + compact_target_file_size_in_mb: + title: "Target size of compacted data file" + description: + "Specify the target size of Iceberg data file when performing\ + \ a compaction action. 
" + type: "integer" + default: 100 + order: 3 + order: 2 + destinationType: + title: "iceberg" + const: "iceberg" + enum: + - "iceberg" + order: 0 + type: "string" + destination-iceberg-update: + title: "Iceberg Destination Spec" + type: "object" + required: + - "catalog_config" + - "storage_config" + - "format_config" + properties: + catalog_config: + title: "Iceberg catalog config" + type: "object" + description: "Catalog config of Iceberg." + oneOf: + - title: "HiveCatalog: Use Apache Hive MetaStore" + required: + - "catalog_type" + - "hive_thrift_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hive" + enum: + - "Hive" + order: 0 + hive_thrift_uri: + title: "Hive Metastore thrift uri" + type: "string" + description: "Hive MetaStore thrift server uri of iceberg catalog." + examples: + - "host:port" + order: 1 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 2 + - title: + "HadoopCatalog: Use hierarchical file systems as same as storage\ + \ config" + description: + "A Hadoop catalog doesn’t need to connect to a Hive MetaStore,\ + \ but can only be used with HDFS or similar file systems that support\ + \ atomic rename." + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hadoop" + enum: + - "Hadoop" + order: 0 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 1 + - title: "JdbcCatalog: Use relational database" + description: + "Using a table in a relational database to manage Iceberg\ + \ tables through JDBC. 
Read more here. Supporting: PostgreSQL" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Jdbc" + enum: + - "Jdbc" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + jdbc_url: + title: "Jdbc url" + type: "string" + examples: + - "jdbc:postgresql://{host}:{port}/{database}" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please\ + \ select one of the connection modes." + type: "boolean" + default: false + order: 5 + catalog_schema: + title: "schema for Iceberg catalog" + description: + "Iceberg catalog metadata tables are written to catalog\ + \ schema. The usual value for this field is \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 6 + - title: "RESTCatalog" + description: + "The RESTCatalog connects to a REST server at the specified\ + \ URI" + required: + - "catalog_type" + - "rest_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Rest" + enum: + - "Rest" + order: 0 + rest_uri: + title: "REST Server URI" + type: "string" + examples: + - "http://localhost:12345" + order: 1 + rest_credential: + title: + "A credential to exchange for a token in the OAuth2 client\ + \ credentials flow." + type: "string" + airbyte_secret: true + examples: + - "username:password" + order: 2 + rest_token: + title: + "A Bearer token which will be used for interaction with the\ + \ server." 
+ type: "string" + airbyte_secret: true + examples: + - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + order: 3 + - title: "GlueCatalog" + description: "The GlueCatalog connects to a AWS Glue Catalog" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Glue" + enum: + - "Glue" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + order: 0 + storage_config: + title: "Storage config" + type: "object" + description: "Storage config of Iceberg." + oneOf: + - title: "S3" + type: "object" + description: "S3 object storage" + required: + - "storage_type" + - "access_key_id" + - "secret_access_key" + - "s3_warehouse_uri" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "S3" + enum: + - "S3" + order: 0 + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read\ + \ more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_warehouse_uri: + title: "S3 Warehouse Uri for Iceberg" + type: "string" + description: "The Warehouse Uri for Iceberg" + examples: + - "s3a://my-bucket/path/to/warehouse" + - "s3://my-bucket/path/to/warehouse" + order: 2 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + - "localhost:9000" + order: 4 + s3_path_style_access: + type: "boolean" + description: "Use path style access" + examples: + - true + - false + default: true + order: 5 + - title: "Server-managed" + type: "object" + description: "Server-managed object storage" + required: + - "storage_type" + - "managed_warehouse_name" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "MANAGED" + enum: + - "MANAGED" + order: 0 + managed_warehouse_name: + type: "string" + description: "The name of the managed warehouse" + title: "Warehouse name" + order: 0 + order: 1 + format_config: + title: "File format" + type: "object" + required: + - "format" + description: "File format of Iceberg storage." + properties: + format: + title: "File storage format" + type: "string" + default: "Parquet" + description: "" + enum: + - "Parquet" + - "Avro" + order: 0 + flush_batch_size: + title: "Data file flushing batch size" + description: + "Iceberg data file flush batch size. Incoming rows write\ + \ to cache firstly; When cache size reaches this 'batch size', flush\ + \ into real Iceberg data file." + type: "integer" + default: 10000 + order: 1 + auto_compact: + title: "Auto compact data files" + description: "Auto compact data files when stream close" + type: "boolean" + default: false + order: 2 + compact_target_file_size_in_mb: + title: "Target size of compacted data file" + description: + "Specify the target size of Iceberg data file when performing\ + \ a compaction action. " + type: "integer" + default: 100 + order: 3 + order: 2 + destination-sftp-json: + title: "Destination SFTP JSON" + type: "object" + required: + - "host" + - "username" + - "password" + - "destination_path" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the SFTP server." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the SFTP server." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - 22 + order: 1 + username: + title: "User" + description: "Username to use to access the SFTP server." + type: "string" + order: 2 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + destination_path: + title: "Destination path" + type: "string" + description: "Path to the directory where json files will be written." + examples: + - "/json_data" + order: 4 + destinationType: + title: "sftp-json" + const: "sftp-json" + enum: + - "sftp-json" + order: 0 + type: "string" + destination-sftp-json-update: + title: "Destination SFTP JSON" + type: "object" + required: + - "host" + - "username" + - "password" + - "destination_path" + properties: + host: + title: "Host" + description: "Hostname of the SFTP server." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the SFTP server." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - 22 + order: 1 + username: + title: "User" + description: "Username to use to access the SFTP server." + type: "string" + order: 2 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 3 + destination_path: + title: "Destination path" + type: "string" + description: "Path to the directory where json files will be written." + examples: + - "/json_data" + order: 4 + destination-s3: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "destinationType" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." 
+ title: "S3 Key ID" + airbyte_secret: true + always_show: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + always_show: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + role_arn: + type: "string" + description: "The Role ARN" + title: "Role ARN" + examples: + - "arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId" + order: 2 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 3 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 4 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 5 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. 
See here for more details" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." + oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." 
+ oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Avro" + default: "Avro" + order: 0 + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate Level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." 
+ type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + order: 1 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. 
Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + order: 6 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 7 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 8 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 9 + destinationType: + title: "s3" + const: "s3" + enum: + - "s3" + order: 0 + type: "string" + destination-s3-update: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + always_show: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read more here" + title: "S3 Access Key" + airbyte_secret: true + always_show: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + role_arn: + type: "string" + description: "The Role ARN" + title: "Role ARN" + examples: + - "arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId" + order: 2 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 3 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 4 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 5 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." 
+ default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." + oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Avro" + default: "Avro" + order: 0 + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate Level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + order: 1 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + order: 6 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 7 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. 
Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 8 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 9 + destination-redis: + title: "Redis Destination Spec" + type: "object" + required: + - "host" + - "username" + - "port" + - "cache_type" + - "destinationType" + properties: + host: + title: "Host" + description: "Redis host to connect to." + type: "string" + examples: + - "localhost,127.0.0.1" + order: 1 + port: + title: "Port" + description: "Port of Redis." + type: "integer" + minimum: 0 + maximum: 65536 + default: 6379 + order: 2 + username: + title: "Username" + description: "Username associated with Redis." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with Redis." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + type: "boolean" + description: + "Indicates whether SSL encryption protocol will be used to\ + \ connect to Redis. It is recommended to use SSL connection if possible." + default: false + order: 5 + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n
  • verify-full - This is\ + \ the most secure mode. Always require encryption and verifies the identity\ + \ of the source database server" + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + cache_type: + title: "Cache type" + type: "string" + default: "hash" + description: "Redis cache type to store data in." + enum: + - "hash" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "redis" + const: "redis" + enum: + - "redis" + order: 0 + type: "string" + destination-redis-update: + title: "Redis Destination Spec" + type: "object" + required: + - "host" + - "username" + - "port" + - "cache_type" + properties: + host: + title: "Host" + description: "Redis host to connect to." + type: "string" + examples: + - "localhost,127.0.0.1" + order: 1 + port: + title: "Port" + description: "Port of Redis." + type: "integer" + minimum: 0 + maximum: 65536 + default: 6379 + order: 2 + username: + title: "Username" + description: "Username associated with Redis." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with Redis." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + type: "boolean" + description: + "Indicates whether SSL encryption protocol will be used to\ + \ connect to Redis. It is recommended to use SSL connection if possible." + default: false + order: 5 + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n
  • verify-full - This is\ + \ the most secure mode. Always require encryption and verifies the identity\ + \ of the source database server" + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + cache_type: + title: "Cache type" + type: "string" + default: "hash" + description: "Redis cache type to store data in." + enum: + - "hash" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-typesense: + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + - "destinationType" + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: + "Hostname of the Typesense instance without protocol. Accept\ + \ multiple hosts separated by comma." + order: 1 + port: + title: "Port" + type: "string" + description: + "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: + "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "integer" + description: "How many documents should be imported together. Default 1000" + order: 4 + path: + title: "Path" + type: "string" + description: "Path of the Typesense instance. Default is none" + order: 5 + destinationType: + title: "typesense" + const: "typesense" + enum: + - "typesense" + order: 0 + type: "string" + destination-typesense-update: + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: + "Hostname of the Typesense instance without protocol. 
Accept\ + \ multiple hosts separated by comma." + order: 1 + port: + title: "Port" + type: "string" + description: + "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: + "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "integer" + description: "How many documents should be imported together. Default 1000" + order: 4 + path: + title: "Path" + type: "string" + description: "Path of the Typesense instance. Default is none" + order: 5 + destination-bigquery: + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_location" + - "dataset_id" + - "destinationType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset. Read more here." + title: "Project ID" + group: "connection" + order: 0 + dataset_location: + type: "string" + description: + "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied. Read more here." 
+ title: "Dataset Location" + group: "connection" + order: 1 + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-southwest1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "europe-west7" + - "europe-west8" + - "europe-west9" + - "europe-west12" + - "me-central1" + - "me-central2" + - "me-west1" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "southamerica-east1" + - "southamerica-west1" + - "us-central1" + - "us-east1" + - "us-east2" + - "us-east3" + - "us-east4" + - "us-east5" + - "us-south1" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + dataset_id: + type: "string" + description: + "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" + group: "connection" + order: 2 + loading_method: + type: "object" + title: "Loading Method" + description: "The way data will be uploaded to BigQuery." + display_type: "radio" + group: "connection" + order: 3 + oneOf: + - title: "Batched Standard Inserts" + required: + - "method" + description: + "Direct loading using batched SQL INSERT statements. This\ + \ method uses the BigQuery driver to convert large INSERT statements\ + \ into file uploads automatically." + properties: + method: + type: "string" + const: "Standard" + enum: + - "Standard" + - title: "GCS Staging" + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses COPY INTO to load your data into BigQuery." 
+ required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + enum: + - "GCS Staging" + credential: + title: "Credential" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read\ + \ more here." + type: "object" + order: 1 + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + order: 0 + enum: + - "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + order: 1 + x-speakeasy-param-sensitive: true + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + order: 2 + x-speakeasy-param-sensitive: true + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the GCS bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + gcs_bucket_path: + title: "GCS Bucket Path" + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + order: 3 + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. By this select you can chose if these records should\ + \ be removed from GCS when migration has finished. The default \"\ + Delete all tmp files from GCS\" value is used if not set explicitly." 
+ title: "GCS Tmp Files Afterward Processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + order: 4 + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Service Account Key JSON (Required for cloud, optional for open-source)" + airbyte_secret: true + group: "connection" + order: 4 + always_show: true + x-speakeasy-param-sensitive: true + transformation_priority: + type: "string" + description: + "Interactive run type means that the query is executed as soon\ + \ as possible, and these queries count towards concurrent rate limit and\ + \ daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources\ + \ are available in the BigQuery shared resource pool, which usually occurs\ + \ within a few minutes. Batch queries don’t count towards your concurrent\ + \ rate limit. Read more about batch queries here. The default \"interactive\" value is used if not set explicitly." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + order: 5 + group: "advanced" + big_query_client_buffer_size_mb: + title: "Google BigQuery Client Chunk Size" + description: + "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\ + \ 15) for each table. The size that will be written by a single RPC. Written\ + \ data will be buffered and only flushed upon reaching this size or closing\ + \ the channel. The default 15MB value is used if not set explicitly. Read\ + \ more here." 
+ type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + order: 6 + group: "advanced" + raw_data_dataset: + type: "string" + description: "The dataset to write raw tables into (default: airbyte_internal)" + title: "Raw Table Dataset Name" + order: 7 + group: "advanced" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + group: "advanced" + destinationType: + title: "bigquery" + const: "bigquery" + enum: + - "bigquery" + order: 0 + type: "string" + groups: + - id: "connection" + title: "Connection" + - id: "advanced" + title: "Advanced" + destination-bigquery-update: + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_location" + - "dataset_id" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset. Read more here." + title: "Project ID" + group: "connection" + order: 0 + dataset_location: + type: "string" + description: + "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied. Read more here." 
+ title: "Dataset Location" + group: "connection" + order: 1 + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-southwest1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "europe-west7" + - "europe-west8" + - "europe-west9" + - "europe-west12" + - "me-central1" + - "me-central2" + - "me-west1" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "southamerica-east1" + - "southamerica-west1" + - "us-central1" + - "us-east1" + - "us-east2" + - "us-east3" + - "us-east4" + - "us-east5" + - "us-south1" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + dataset_id: + type: "string" + description: + "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" + group: "connection" + order: 2 + loading_method: + type: "object" + title: "Loading Method" + description: "The way data will be uploaded to BigQuery." + display_type: "radio" + group: "connection" + order: 3 + oneOf: + - title: "Batched Standard Inserts" + required: + - "method" + description: + "Direct loading using batched SQL INSERT statements. This\ + \ method uses the BigQuery driver to convert large INSERT statements\ + \ into file uploads automatically." + properties: + method: + type: "string" + const: "Standard" + enum: + - "Standard" + - title: "GCS Staging" + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses COPY INTO to load your data into BigQuery." 
+ required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + enum: + - "GCS Staging" + credential: + title: "Credential" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read\ + \ more here." + type: "object" + order: 1 + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + order: 0 + enum: + - "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + order: 1 + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + order: 2 + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the GCS bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + gcs_bucket_path: + title: "GCS Bucket Path" + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + order: 3 + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. By this select you can chose if these records should\ + \ be removed from GCS when migration has finished. The default \"\ + Delete all tmp files from GCS\" value is used if not set explicitly." 
+ title: "GCS Tmp Files Afterward Processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + order: 4 + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Service Account Key JSON (Required for cloud, optional for open-source)" + airbyte_secret: true + group: "connection" + order: 4 + always_show: true + transformation_priority: + type: "string" + description: + "Interactive run type means that the query is executed as soon\ + \ as possible, and these queries count towards concurrent rate limit and\ + \ daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources\ + \ are available in the BigQuery shared resource pool, which usually occurs\ + \ within a few minutes. Batch queries don’t count towards your concurrent\ + \ rate limit. Read more about batch queries here. The default \"interactive\" value is used if not set explicitly." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + order: 5 + group: "advanced" + big_query_client_buffer_size_mb: + title: "Google BigQuery Client Chunk Size" + description: + "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\ + \ 15) for each table. The size that will be written by a single RPC. Written\ + \ data will be buffered and only flushed upon reaching this size or closing\ + \ the channel. The default 15MB value is used if not set explicitly. Read\ + \ more here." 
+ type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + order: 6 + group: "advanced" + raw_data_dataset: + type: "string" + description: "The dataset to write raw tables into (default: airbyte_internal)" + title: "Raw Table Dataset Name" + order: 7 + group: "advanced" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + group: "advanced" + groups: + - id: "connection" + title: "Connection" + - id: "advanced" + title: "Advanced" + destination-elasticsearch: + title: "Elasticsearch Connection Configuration" + type: "object" + required: + - "endpoint" + - "destinationType" + properties: + endpoint: + title: "Server Endpoint" + type: "string" + description: "The full url of the Elasticsearch server" + upsert: + type: "boolean" + title: "Upsert Records" + description: + "If a primary key identifier is defined in the source, an upsert\ + \ will be performed using the primary key value as the elasticsearch doc\ + \ id. Does not support composite primary keys." 
+ default: true + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + authenticationMethod: + title: "Authentication Method" + type: "object" + description: "The type of authentication to be used" + oneOf: + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + enum: + - "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + enum: + - "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + enum: + - "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "elasticsearch" + const: "elasticsearch" + enum: + - "elasticsearch" + order: 0 + type: "string" + destination-elasticsearch-update: + title: "Elasticsearch Connection Configuration" + type: "object" + required: + - "endpoint" + properties: + endpoint: + title: "Server Endpoint" + type: "string" + description: "The full url of the Elasticsearch server" + upsert: + type: "boolean" + title: "Upsert Records" + description: + "If a primary key identifier is defined in the source, an upsert\ + \ will be performed using the primary key value as the elasticsearch doc\ + \ id. Does not support composite primary keys." 
+ default: true + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + authenticationMethod: + title: "Authentication Method" + type: "object" + description: "The type of authentication to be used" + oneOf: + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + enum: + - "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + enum: + - "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." + type: "string" + airbyte_secret: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + enum: + - "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-azure-blob-storage: + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + - "destinationType" + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: + "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." 
+ airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + x-speakeasy-param-sensitive: true + azure_blob_storage_output_buffer_size: + title: "Azure Blob Storage output buffer size (Megabytes)" + type: "integer" + description: + "The amount of megabytes to buffer for the output stream to\ + \ Azure. This will impact memory footprint on workers, but may need adjustment\ + \ for performance and appropriate block size in Azure." + minimum: 1 + maximum: 2047 + default: 5 + examples: + - 5 + azure_blob_storage_spill_size: + title: "Azure Blob Storage file spill size" + type: "integer" + description: + "The amount of megabytes after which the connector should spill\ + \ the records in a new blob object. Make sure to configure size greater\ + \ than individual records. Enter 0 if not applicable" + default: 500 + examples: + - 500 + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + enum: + - "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + enum: + - "JSONL" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." 
+ destinationType: + title: "azure-blob-storage" + const: "azure-blob-storage" + enum: + - "azure-blob-storage" + order: 0 + type: "string" + destination-azure-blob-storage-update: + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: + "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + azure_blob_storage_output_buffer_size: + title: "Azure Blob Storage output buffer size (Megabytes)" + type: "integer" + description: + "The amount of megabytes to buffer for the output stream to\ + \ Azure. This will impact memory footprint on workers, but may need adjustment\ + \ for performance and appropriate block size in Azure." 
+ minimum: 1 + maximum: 2047 + default: 5 + examples: + - 5 + azure_blob_storage_spill_size: + title: "Azure Blob Storage file spill size" + type: "integer" + description: + "The amount of megabytes after which the connector should spill\ + \ the records in a new blob object. Make sure to configure size greater\ + \ than individual records. Enter 0 if not applicable" + default: 500 + examples: + - 500 + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + enum: + - "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + enum: + - "JSONL" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." 
+ destination-pgvector: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Postgres Connection" + type: "object" + properties: + host: + title: "Host" + description: "Enter the account name you want to use to access the database." 
+ order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + port: + title: "Port" + description: "Enter the port you want to use to access the database" + default: 5432 + order: 2 + examples: + - "5432" + type: "integer" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + default: "public" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + required: + - "host" + - "database" + - "username" + - "credentials" + description: "Postgres can be used to store vector data and retrieve embeddings." 
+ group: "indexing" + destinationType: + title: "pgvector" + const: "pgvector" + enum: + - "pgvector" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-pgvector-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Postgres Connection" + type: "object" + properties: + host: + title: "Host" + description: "Enter the account name you want to use to access the database." 
+ order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + port: + title: "Port" + description: "Enter the port you want to use to access the database" + default: 5432 + order: 2 + examples: + - "5432" + type: "integer" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + default: "public" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + required: + - "password" + required: + - "host" + - "database" + - "username" + - "credentials" + description: "Postgres can be used to store vector data and retrieve embeddings." + group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-yellowbrick: + title: "Yellowbrick Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." 
+ type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "yellowbrick" + const: "yellowbrick" + enum: + - "yellowbrick" + order: 0 + type: "string" + destination-yellowbrick-update: + title: "Yellowbrick Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." 
+ type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-postgres: + title: "Postgres Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." 
+ type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into" + title: "Raw table schema (defaults to airbyte_internal)" + order: 9 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 10 + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. 
WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 11 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "postgres" + const: "postgres" + enum: + - "postgres" + order: 0 + type: "string" + destination-postgres-update: + title: "Postgres Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into" + title: "Raw table schema (defaults to airbyte_internal)" + order: 9 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 10 + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. 
This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 11 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + InitiateOauthRequest: + title: Root Type for initiate-oauth-post-body + description: POST body for initiating OAuth via the public API + required: + - redirectUrl + - workspaceId + - sourceType + type: object + example: + redirectUrl: "https://cloud.airbyte.io/v1/api/oauth/callback" + workspaceId: 871d9b60-11d1-44cb-8c92-c246d53bf87e + destinationId: 3d93b16c-ff5f-421c-8908-5a3c82088f14 + properties: + redirectUrl: + description: >- + The URL to redirect the user to with the OAuth secret stored in the secret_id query + string parameter after authentication is complete. 
+ type: string + workspaceId: + format: uuid + description: The workspace to create the secret and eventually the full source. + type: string + oAuthInputConfiguration: + $ref: "#/components/schemas/OAuthInputConfiguration" + description: Input configuration for OAuth required by some sources. + sourceType: + $ref: "#/components/schemas/OAuthActorNames" + WorkspaceOAuthCredentialsRequest: + title: "Root Type for WorkspaceOAuthCredentials" + description: "POST body for creating/updating workspace level OAuth credentials" + required: + - "actorType" + - "name" + - "configuration" + type: "object" + properties: + actorType: + $ref: "#/components/schemas/ActorTypeEnum" + name: + $ref: "#/components/schemas/OAuthActorNames" + configuration: + $ref: "#/components/schemas/OAuthCredentialsConfiguration" + OAuthActorNames: + enum: + - airtable + - amazon-ads + - amazon-seller-partner + - asana + - azure-blob-storage + - bing-ads + - facebook-marketing + - github + - gitlab + - google-ads + - google-analytics-data-api + - google-drive + - google-search-console + - google-sheets + - hubspot + - instagram + - intercom + - lever-hiring + - linkedin-ads + - mailchimp + - microsoft-onedrive + - microsoft-sharepoint + - microsoft-teams + - monday + - notion + - pinterest + - retently + - salesforce + - slack + - smartsheets + - snapchat-marketing + - snowflake + - square + - surveymonkey + - tiktok-marketing + - trello + - typeform + - youtube-analytics + - zendesk-chat + - zendesk-sunshine + - zendesk-support + - zendesk-talk + SourceConfiguration: + description: The values required to configure the source. 
+ example: { user: "charles" } + oneOf: + - title: source-aha + $ref: "#/components/schemas/source-aha" + - title: source-7shifts + $ref: "#/components/schemas/source-7shifts" + - title: source-airbyte + $ref: "#/components/schemas/source-airbyte" + - title: source-aircall + $ref: "#/components/schemas/source-aircall" + - title: source-airtable + $ref: "#/components/schemas/source-airtable" + - title: source-algolia + $ref: "#/components/schemas/source-algolia" + - title: source-amazon-ads + $ref: "#/components/schemas/source-amazon-ads" + - title: source-amazon-seller-partner + $ref: "#/components/schemas/source-amazon-seller-partner" + - title: source-amazon-sqs + $ref: "#/components/schemas/source-amazon-sqs" + - title: source-amplitude + $ref: "#/components/schemas/source-amplitude" + - title: source-apify-dataset + $ref: "#/components/schemas/source-apify-dataset" + - title: source-appcues + $ref: "#/components/schemas/source-appcues" + - title: source-appfigures + $ref: "#/components/schemas/source-appfigures" + - title: source-appfollow + $ref: "#/components/schemas/source-appfollow" + - title: source-asana + $ref: "#/components/schemas/source-asana" + - title: source-auth0 + $ref: "#/components/schemas/source-auth0" + - title: source-aws-cloudtrail + $ref: "#/components/schemas/source-aws-cloudtrail" + - title: source-azure-blob-storage + $ref: "#/components/schemas/source-azure-blob-storage" + - title: source-azure-table + $ref: "#/components/schemas/source-azure-table" + - title: source-bamboo-hr + $ref: "#/components/schemas/source-bamboo-hr" + - title: source-basecamp + $ref: "#/components/schemas/source-basecamp" + - title: source-beamer + $ref: "#/components/schemas/source-beamer" + - title: source-bigquery + $ref: "#/components/schemas/source-bigquery" + - title: source-bing-ads + $ref: "#/components/schemas/source-bing-ads" + - title: source-bitly + $ref: "#/components/schemas/source-bitly" + - title: source-braintree + $ref: 
"#/components/schemas/source-braintree" + - title: source-braze + $ref: "#/components/schemas/source-braze" + - title: source-breezy-hr + $ref: "#/components/schemas/source-breezy-hr" + - title: source-brevo + $ref: "#/components/schemas/source-brevo" + - title: source-buildkite + $ref: "#/components/schemas/source-buildkite" + - title: source-buzzsprout + $ref: "#/components/schemas/source-buzzsprout" + - title: source-calendly + $ref: "#/components/schemas/source-calendly" + - title: source-canny + $ref: "#/components/schemas/source-canny" + - title: source-cart + $ref: "#/components/schemas/source-cart" + - title: source-chameleon + $ref: "#/components/schemas/source-chameleon" + - title: source-chargebee + $ref: "#/components/schemas/source-chargebee" + - title: source-chartmogul + $ref: "#/components/schemas/source-chartmogul" + - title: source-cimis + $ref: "#/components/schemas/source-cimis" + - title: source-clazar + $ref: "#/components/schemas/source-clazar" + - title: source-clickhouse + $ref: "#/components/schemas/source-clickhouse" + - title: source-clickup-api + $ref: "#/components/schemas/source-clickup-api" + - title: source-clockify + $ref: "#/components/schemas/source-clockify" + - title: source-close-com + $ref: "#/components/schemas/source-close-com" + - title: source-coda + $ref: "#/components/schemas/source-coda" + - title: source-coin-api + $ref: "#/components/schemas/source-coin-api" + - title: source-coinmarketcap + $ref: "#/components/schemas/source-coinmarketcap" + - title: source-configcat + $ref: "#/components/schemas/source-configcat" + - title: source-confluence + $ref: "#/components/schemas/source-confluence" + - title: source-convex + $ref: "#/components/schemas/source-convex" + - title: source-customer-io + $ref: "#/components/schemas/source-customer-io" + - title: source-datadog + $ref: "#/components/schemas/source-datadog" + - title: source-datascope + $ref: "#/components/schemas/source-datascope" + - title: source-dbt + $ref: 
"#/components/schemas/source-dbt" + - title: source-delighted + $ref: "#/components/schemas/source-delighted" + - title: source-dixa + $ref: "#/components/schemas/source-dixa" + - title: source-dockerhub + $ref: "#/components/schemas/source-dockerhub" + - title: source-dremio + $ref: "#/components/schemas/source-dremio" + - title: source-dropbox-sign + $ref: "#/components/schemas/source-dropbox-sign" + - title: source-dynamodb + $ref: "#/components/schemas/source-dynamodb" + - title: source-emailoctopus + $ref: "#/components/schemas/source-emailoctopus" + - title: source-eventbrite + $ref: "#/components/schemas/source-eventbrite" + - title: source-exchange-rates + $ref: "#/components/schemas/source-exchange-rates" + - title: source-ezofficeinventory + $ref: "#/components/schemas/source-ezofficeinventory" + - title: source-facebook-marketing + $ref: "#/components/schemas/source-facebook-marketing" + - title: source-faker + $ref: "#/components/schemas/source-faker" + - title: source-fauna + $ref: "#/components/schemas/source-fauna" + - title: source-file + $ref: "#/components/schemas/source-file" + - title: source-firebolt + $ref: "#/components/schemas/source-firebolt" + - title: source-fleetio + $ref: "#/components/schemas/source-fleetio" + - title: source-freshcaller + $ref: "#/components/schemas/source-freshcaller" + - title: source-freshchat + $ref: "#/components/schemas/source-freshchat" + - title: source-freshdesk + $ref: "#/components/schemas/source-freshdesk" + - title: source-freshsales + $ref: "#/components/schemas/source-freshsales" + - title: source-front + $ref: "#/components/schemas/source-front" + - title: source-gainsight-px + $ref: "#/components/schemas/source-gainsight-px" + - title: source-gcs + $ref: "#/components/schemas/source-gcs" + - title: source-getlago + $ref: "#/components/schemas/source-getlago" + - title: source-github + $ref: "#/components/schemas/source-github" + - title: source-gitlab + $ref: "#/components/schemas/source-gitlab" + - 
title: source-glassfrog + $ref: "#/components/schemas/source-glassfrog" + - title: source-gnews + $ref: "#/components/schemas/source-gnews" + - title: source-goldcast + $ref: "#/components/schemas/source-goldcast" + - title: source-google-ads + $ref: "#/components/schemas/source-google-ads" + - title: source-google-analytics-data-api + $ref: "#/components/schemas/source-google-analytics-data-api" + - title: source-google-directory + $ref: "#/components/schemas/source-google-directory" + - title: source-google-drive + $ref: "#/components/schemas/source-google-drive" + - title: source-google-pagespeed-insights + $ref: "#/components/schemas/source-google-pagespeed-insights" + - title: source-google-search-console + $ref: "#/components/schemas/source-google-search-console" + - title: source-google-sheets + $ref: "#/components/schemas/source-google-sheets" + - title: source-google-tasks + $ref: "#/components/schemas/source-google-tasks" + - title: source-google-webfonts + $ref: "#/components/schemas/source-google-webfonts" + - title: source-greenhouse + $ref: "#/components/schemas/source-greenhouse" + - title: source-gridly + $ref: "#/components/schemas/source-gridly" + - title: source-guru + $ref: "#/components/schemas/source-guru" + - title: source-hardcoded-records + $ref: "#/components/schemas/source-hardcoded-records" + - title: source-harvest + $ref: "#/components/schemas/source-harvest" + - title: source-height + $ref: "#/components/schemas/source-height" + - title: source-hibob + $ref: "#/components/schemas/source-hibob" + - title: source-high-level + $ref: "#/components/schemas/source-high-level" + - title: source-hubplanner + $ref: "#/components/schemas/source-hubplanner" + - title: source-hubspot + $ref: "#/components/schemas/source-hubspot" + - title: source-insightly + $ref: "#/components/schemas/source-insightly" + - title: source-instagram + $ref: "#/components/schemas/source-instagram" + - title: source-instatus + $ref: 
"#/components/schemas/source-instatus" + - title: source-intercom + $ref: "#/components/schemas/source-intercom" + - title: source-ip2whois + $ref: "#/components/schemas/source-ip2whois" + - title: source-iterable + $ref: "#/components/schemas/source-iterable" + - title: source-jira + $ref: "#/components/schemas/source-jira" + - title: source-jotform + $ref: "#/components/schemas/source-jotform" + - title: source-k6-cloud + $ref: "#/components/schemas/source-k6-cloud" + - title: source-kissmetrics + $ref: "#/components/schemas/source-kissmetrics" + - title: source-klarna + $ref: "#/components/schemas/source-klarna" + - title: source-klaviyo + $ref: "#/components/schemas/source-klaviyo" + - title: source-kyve + $ref: "#/components/schemas/source-kyve" + - title: source-launchdarkly + $ref: "#/components/schemas/source-launchdarkly" + - title: source-leadfeeder + $ref: "#/components/schemas/source-leadfeeder" + - title: source-lemlist + $ref: "#/components/schemas/source-lemlist" + - title: source-lever-hiring + $ref: "#/components/schemas/source-lever-hiring" + - title: source-linkedin-ads + $ref: "#/components/schemas/source-linkedin-ads" + - title: source-linkedin-pages + $ref: "#/components/schemas/source-linkedin-pages" + - title: source-linnworks + $ref: "#/components/schemas/source-linnworks" + - title: source-lob + $ref: "#/components/schemas/source-lob" + - title: source-lokalise + $ref: "#/components/schemas/source-lokalise" + - title: source-looker + $ref: "#/components/schemas/source-looker" + - title: source-luma + $ref: "#/components/schemas/source-luma" + - title: source-mailchimp + $ref: "#/components/schemas/source-mailchimp" + - title: source-mailgun + $ref: "#/components/schemas/source-mailgun" + - title: source-mailjet-sms + $ref: "#/components/schemas/source-mailjet-sms" + - title: source-marketo + $ref: "#/components/schemas/source-marketo" + - title: source-metabase + $ref: "#/components/schemas/source-metabase" + - title: 
source-microsoft-onedrive + $ref: "#/components/schemas/source-microsoft-onedrive" + - title: source-microsoft-sharepoint + $ref: "#/components/schemas/source-microsoft-sharepoint" + - title: source-microsoft-teams + $ref: "#/components/schemas/source-microsoft-teams" + - title: source-mixpanel + $ref: "#/components/schemas/source-mixpanel" + - title: source-monday + $ref: "#/components/schemas/source-monday" + - title: source-mongodb-v2 + $ref: "#/components/schemas/source-mongodb-v2" + - title: source-mssql + $ref: "#/components/schemas/source-mssql" + - title: source-my-hours + $ref: "#/components/schemas/source-my-hours" + - title: source-mysql + $ref: "#/components/schemas/source-mysql" + - title: source-netsuite + $ref: "#/components/schemas/source-netsuite" + - title: source-northpass-lms + $ref: "#/components/schemas/source-northpass-lms" + - title: source-notion + $ref: "#/components/schemas/source-notion" + - title: source-nylas + $ref: "#/components/schemas/source-nylas" + - title: source-nytimes + $ref: "#/components/schemas/source-nytimes" + - title: source-okta + $ref: "#/components/schemas/source-okta" + - title: source-omnisend + $ref: "#/components/schemas/source-omnisend" + - title: source-onesignal + $ref: "#/components/schemas/source-onesignal" + - title: source-oracle + $ref: "#/components/schemas/source-oracle" + - title: source-orb + $ref: "#/components/schemas/source-orb" + - title: source-orbit + $ref: "#/components/schemas/source-orbit" + - title: source-outbrain-amplify + $ref: "#/components/schemas/source-outbrain-amplify" + - title: source-outreach + $ref: "#/components/schemas/source-outreach" + - title: source-paypal-transaction + $ref: "#/components/schemas/source-paypal-transaction" + - title: source-paystack + $ref: "#/components/schemas/source-paystack" + - title: source-pendo + $ref: "#/components/schemas/source-pendo" + - title: source-pennylane + $ref: "#/components/schemas/source-pennylane" + - title: source-persistiq + $ref: 
"#/components/schemas/source-persistiq" + - title: source-pexels-api + $ref: "#/components/schemas/source-pexels-api" + - title: source-picqer + $ref: "#/components/schemas/source-picqer" + - title: source-pinterest + $ref: "#/components/schemas/source-pinterest" + - title: source-pipedrive + $ref: "#/components/schemas/source-pipedrive" + - title: source-piwik + $ref: "#/components/schemas/source-piwik" + - title: source-planhat + $ref: "#/components/schemas/source-planhat" + - title: source-pocket + $ref: "#/components/schemas/source-pocket" + - title: source-pokeapi + $ref: "#/components/schemas/source-pokeapi" + - title: source-polygon-stock-api + $ref: "#/components/schemas/source-polygon-stock-api" + - title: source-postgres + $ref: "#/components/schemas/source-postgres" + - title: source-posthog + $ref: "#/components/schemas/source-posthog" + - title: source-postmarkapp + $ref: "#/components/schemas/source-postmarkapp" + - title: source-prestashop + $ref: "#/components/schemas/source-prestashop" + - title: source-productboard + $ref: "#/components/schemas/source-productboard" + - title: source-productive + $ref: "#/components/schemas/source-productive" + - title: source-pypi + $ref: "#/components/schemas/source-pypi" + - title: source-qualaroo + $ref: "#/components/schemas/source-qualaroo" + - title: source-railz + $ref: "#/components/schemas/source-railz" + - title: source-recharge + $ref: "#/components/schemas/source-recharge" + - title: source-recreation + $ref: "#/components/schemas/source-recreation" + - title: source-recruitee + $ref: "#/components/schemas/source-recruitee" + - title: source-recurly + $ref: "#/components/schemas/source-recurly" + - title: source-reddit + $ref: "#/components/schemas/source-reddit" + - title: source-redshift + $ref: "#/components/schemas/source-redshift" + - title: source-retently + $ref: "#/components/schemas/source-retently" + - title: source-rki-covid + $ref: "#/components/schemas/source-rki-covid" + - title: 
source-rollbar + $ref: "#/components/schemas/source-rollbar" + - title: source-rss + $ref: "#/components/schemas/source-rss" + - title: source-s3 + $ref: "#/components/schemas/source-s3" + - title: source-salesforce + $ref: "#/components/schemas/source-salesforce" + - title: source-salesloft + $ref: "#/components/schemas/source-salesloft" + - title: source-sap-fieldglass + $ref: "#/components/schemas/source-sap-fieldglass" + - title: source-savvycal + $ref: "#/components/schemas/source-savvycal" + - title: source-scryfall + $ref: "#/components/schemas/source-scryfall" + - title: source-secoda + $ref: "#/components/schemas/source-secoda" + - title: source-sendgrid + $ref: "#/components/schemas/source-sendgrid" + - title: source-sendinblue + $ref: "#/components/schemas/source-sendinblue" + - title: source-senseforce + $ref: "#/components/schemas/source-senseforce" + - title: source-sentry + $ref: "#/components/schemas/source-sentry" + - title: source-sftp + $ref: "#/components/schemas/source-sftp" + - title: source-sftp-bulk + $ref: "#/components/schemas/source-sftp-bulk" + - title: source-shopify + $ref: "#/components/schemas/source-shopify" + - title: source-shortcut + $ref: "#/components/schemas/source-shortcut" + - title: source-shortio + $ref: "#/components/schemas/source-shortio" + - title: source-slack + $ref: "#/components/schemas/source-slack" + - title: source-smaily + $ref: "#/components/schemas/source-smaily" + - title: source-smartengage + $ref: "#/components/schemas/source-smartengage" + - title: source-smartsheets + $ref: "#/components/schemas/source-smartsheets" + - title: source-snapchat-marketing + $ref: "#/components/schemas/source-snapchat-marketing" + - title: source-snowflake + $ref: "#/components/schemas/source-snowflake" + - title: source-sonar-cloud + $ref: "#/components/schemas/source-sonar-cloud" + - title: source-spacex-api + $ref: "#/components/schemas/source-spacex-api" + - title: source-split-io + $ref: 
"#/components/schemas/source-split-io" + - title: source-square + $ref: "#/components/schemas/source-square" + - title: source-strava + $ref: "#/components/schemas/source-strava" + - title: source-stripe + $ref: "#/components/schemas/source-stripe" + - title: source-survey-sparrow + $ref: "#/components/schemas/source-survey-sparrow" + - title: source-surveymonkey + $ref: "#/components/schemas/source-surveymonkey" + - title: source-survicate + $ref: "#/components/schemas/source-survicate" + - title: source-teamwork + $ref: "#/components/schemas/source-teamwork" + - title: source-tempo + $ref: "#/components/schemas/source-tempo" + - title: source-the-guardian-api + $ref: "#/components/schemas/source-the-guardian-api" + - title: source-tiktok-marketing + $ref: "#/components/schemas/source-tiktok-marketing" + - title: source-trello + $ref: "#/components/schemas/source-trello" + - title: source-trustpilot + $ref: "#/components/schemas/source-trustpilot" + - title: source-tvmaze-schedule + $ref: "#/components/schemas/source-tvmaze-schedule" + - title: source-twilio + $ref: "#/components/schemas/source-twilio" + - title: source-twilio-taskrouter + $ref: "#/components/schemas/source-twilio-taskrouter" + - title: source-twitter + $ref: "#/components/schemas/source-twitter" + - title: source-typeform + $ref: "#/components/schemas/source-typeform" + - title: source-us-census + $ref: "#/components/schemas/source-us-census" + - title: source-vantage + $ref: "#/components/schemas/source-vantage" + - title: source-vwo + $ref: "#/components/schemas/source-vwo" + - title: source-webflow + $ref: "#/components/schemas/source-webflow" + - title: source-when-i-work + $ref: "#/components/schemas/source-when-i-work" + - title: source-whisky-hunter + $ref: "#/components/schemas/source-whisky-hunter" + - title: source-wikipedia-pageviews + $ref: "#/components/schemas/source-wikipedia-pageviews" + - title: source-woocommerce + $ref: "#/components/schemas/source-woocommerce" + - title: 
source-xkcd + $ref: "#/components/schemas/source-xkcd" + - title: source-yandex-metrica + $ref: "#/components/schemas/source-yandex-metrica" + - title: source-yotpo + $ref: "#/components/schemas/source-yotpo" + - title: source-youtube-analytics + $ref: "#/components/schemas/source-youtube-analytics" + - title: source-zendesk-chat + $ref: "#/components/schemas/source-zendesk-chat" + - title: source-zendesk-sunshine + $ref: "#/components/schemas/source-zendesk-sunshine" + - title: source-zendesk-support + $ref: "#/components/schemas/source-zendesk-support" + - title: source-zendesk-talk + $ref: "#/components/schemas/source-zendesk-talk" + - title: source-zenloop + $ref: "#/components/schemas/source-zenloop" + - title: source-zoho-crm + $ref: "#/components/schemas/source-zoho-crm" + - title: source-zoom + $ref: "#/components/schemas/source-zoom" + DestinationConfiguration: + description: The values required to configure the destination. + example: { user: "charles" } + oneOf: + - title: destination-google-sheets + $ref: "#/components/schemas/destination-google-sheets" + - title: destination-astra + $ref: "#/components/schemas/destination-astra" + - title: destination-aws-datalake + $ref: "#/components/schemas/destination-aws-datalake" + - title: destination-azure-blob-storage + $ref: "#/components/schemas/destination-azure-blob-storage" + - title: destination-bigquery + $ref: "#/components/schemas/destination-bigquery" + - title: destination-clickhouse + $ref: "#/components/schemas/destination-clickhouse" + - title: destination-convex + $ref: "#/components/schemas/destination-convex" + - title: destination-databricks + $ref: "#/components/schemas/destination-databricks" + - title: destination-dev-null + $ref: "#/components/schemas/destination-dev-null" + - title: destination-duckdb + $ref: "#/components/schemas/destination-duckdb" + - title: destination-dynamodb + $ref: "#/components/schemas/destination-dynamodb" + - title: destination-elasticsearch + $ref: 
"#/components/schemas/destination-elasticsearch" + - title: destination-firebolt + $ref: "#/components/schemas/destination-firebolt" + - title: destination-firestore + $ref: "#/components/schemas/destination-firestore" + - title: destination-gcs + $ref: "#/components/schemas/destination-gcs" + - title: destination-iceberg + $ref: "#/components/schemas/destination-iceberg" + - title: destination-milvus + $ref: "#/components/schemas/destination-milvus" + - title: destination-mongodb + $ref: "#/components/schemas/destination-mongodb" + - title: destination-mssql + $ref: "#/components/schemas/destination-mssql" + - title: destination-mysql + $ref: "#/components/schemas/destination-mysql" + - title: destination-oracle + $ref: "#/components/schemas/destination-oracle" + - title: destination-pgvector + $ref: "#/components/schemas/destination-pgvector" + - title: destination-pinecone + $ref: "#/components/schemas/destination-pinecone" + - title: destination-postgres + $ref: "#/components/schemas/destination-postgres" + - title: destination-pubsub + $ref: "#/components/schemas/destination-pubsub" + - title: destination-qdrant + $ref: "#/components/schemas/destination-qdrant" + - title: destination-redis + $ref: "#/components/schemas/destination-redis" + - title: destination-redshift + $ref: "#/components/schemas/destination-redshift" + - title: destination-s3 + $ref: "#/components/schemas/destination-s3" + - title: destination-s3-glue + $ref: "#/components/schemas/destination-s3-glue" + - title: destination-sftp-json + $ref: "#/components/schemas/destination-sftp-json" + - title: destination-snowflake + $ref: "#/components/schemas/destination-snowflake" + - title: destination-snowflake-cortex + $ref: "#/components/schemas/destination-snowflake-cortex" + - title: destination-teradata + $ref: "#/components/schemas/destination-teradata" + - title: destination-timeplus + $ref: "#/components/schemas/destination-timeplus" + - title: destination-typesense + $ref: 
"#/components/schemas/destination-typesense" + - title: destination-vectara + $ref: "#/components/schemas/destination-vectara" + - title: destination-weaviate + $ref: "#/components/schemas/destination-weaviate" + - title: destination-yellowbrick + $ref: "#/components/schemas/destination-yellowbrick" + OAuthCredentialsConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + oneOf: + - title: airtable + $ref: "#/components/schemas/airtable" + - title: amazon-ads + $ref: "#/components/schemas/amazon-ads" + - title: amazon-seller-partner + $ref: "#/components/schemas/amazon-seller-partner" + - title: asana + $ref: "#/components/schemas/asana" + - title: azure-blob-storage + $ref: "#/components/schemas/azure-blob-storage" + - title: bing-ads + $ref: "#/components/schemas/bing-ads" + - title: facebook-marketing + $ref: "#/components/schemas/facebook-marketing" + - title: github + $ref: "#/components/schemas/github" + - title: gitlab + $ref: "#/components/schemas/gitlab" + - title: google-ads + $ref: "#/components/schemas/google-ads" + - title: google-analytics-data-api + $ref: "#/components/schemas/google-analytics-data-api" + - title: google-drive + $ref: "#/components/schemas/google-drive" + - title: google-search-console + $ref: "#/components/schemas/google-search-console" + - title: google-sheets + $ref: "#/components/schemas/google-sheets" + - title: hubspot + $ref: "#/components/schemas/hubspot" + - title: instagram + $ref: "#/components/schemas/instagram" + - title: intercom + $ref: "#/components/schemas/intercom" + - title: lever-hiring + $ref: "#/components/schemas/lever-hiring" + - title: linkedin-ads + $ref: "#/components/schemas/linkedin-ads" + - title: mailchimp + $ref: "#/components/schemas/mailchimp" + - title: microsoft-onedrive + $ref: "#/components/schemas/microsoft-onedrive" + - title: microsoft-sharepoint + $ref: "#/components/schemas/microsoft-sharepoint" + - title: microsoft-teams + $ref: 
"#/components/schemas/microsoft-teams" + - title: monday + $ref: "#/components/schemas/monday" + - title: notion + $ref: "#/components/schemas/notion" + - title: pinterest + $ref: "#/components/schemas/pinterest" + - title: retently + $ref: "#/components/schemas/retently" + - title: salesforce + $ref: "#/components/schemas/salesforce" + - title: shopify + $ref: "#/components/schemas/shopify" + - title: slack + $ref: "#/components/schemas/slack" + - title: smartsheets + $ref: "#/components/schemas/smartsheets" + - title: snapchat-marketing + $ref: "#/components/schemas/snapchat-marketing" + - title: snowflake + $ref: "#/components/schemas/snowflake" + - title: square + $ref: "#/components/schemas/square" + - title: surveymonkey + $ref: "#/components/schemas/surveymonkey" + - title: tiktok-marketing + $ref: "#/components/schemas/tiktok-marketing" + - title: trello + $ref: "#/components/schemas/trello" + - title: typeform + $ref: "#/components/schemas/typeform" + - title: youtube-analytics + $ref: "#/components/schemas/youtube-analytics" + - title: zendesk-chat + $ref: "#/components/schemas/zendesk-chat" + - title: zendesk-sunshine + $ref: "#/components/schemas/zendesk-sunshine" + - title: zendesk-support + $ref: "#/components/schemas/zendesk-support" + - title: zendesk-talk + $ref: "#/components/schemas/zendesk-talk" + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT + basicAuth: + type: http + scheme: basic + clientCredentials: + type: oauth2 + flows: + clientCredentials: + tokenUrl: /applications/token + scopes: {} +security: + - bearerAuth: [] + - basicAuth: [] + - clientCredentials: [] diff --git a/airbyte-api/server-api/src/main/openapi/api_terraform.yaml b/airbyte-api/server-api/src/main/openapi/api_terraform.yaml new file mode 100644 index 00000000000..a9b2e694f3a --- /dev/null +++ b/airbyte-api/server-api/src/main/openapi/api_terraform.yaml @@ -0,0 +1,100904 @@ +openapi: "3.1.0" +info: + title: "airbyte-api" + version: "1.0.0" 
+ description: "Programatically control Airbyte Cloud, OSS & Enterprise." +servers: + - url: "https://api.airbyte.com/v1" + description: "Airbyte API v1" +paths: + /health: + get: + tags: + - "public_health" + - "public" + responses: + "200": + description: "Successful operation" + operationId: "getHealthCheck" + summary: "Health Check" + security: [] + x-speakeasy-alias: "getHealthCheck" + x-speakeasy-group: "Health" + /jobs: + get: + tags: + - "public_jobs" + - "public" + - "Jobs" + parameters: + - name: "connectionId" + description: "Filter the Jobs by connectionId." + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of Jobs returned. The default is\ + \ 20 Jobs." + schema: + format: "int32" + default: 20 + maximum: 100 + minimum: 1 + type: "integer" + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning Jobs. The default\ + \ is 0." + schema: + format: "int32" + default: 0 + minimum: 0 + type: "integer" + in: "query" + - name: "jobType" + description: "Filter the Jobs by jobType." + schema: + $ref: "#/components/schemas/JobTypeEnum" + in: "query" + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list jobs for. Empty\ + \ list will retrieve all allowed workspaces." 
+ schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "status" + description: "The Job status you want to filter by" + schema: + $ref: "#/components/schemas/JobStatusEnum" + in: "query" + required: false + - name: "createdAtStart" + description: "The start date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "createdAtEnd" + description: "The end date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "updatedAtStart" + description: "The start date to filter by" + schema: + type: "string" + format: "date-time" + example: 1687450500000 + in: "query" + required: false + - name: "updatedAtEnd" + description: "The end date to filter by" + schema: + type: "string" + format: "date-time" + in: "query" + required: false + example: 1687450500000 + - name: "orderBy" + description: "The field and method to use for ordering" + schema: + type: "string" + pattern: "\\w+|(ASC|DESC)" + in: "query" + required: false + example: "updatedAt|DESC" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobsResponse" + examples: + Job List Response Example: + value: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + description: "List all the Jobs by connectionId." 
+ "403": + description: "Not allowed" + operationId: "listJobs" + summary: "List Jobs by sync type" + x-speakeasy-alias: "listJobs" + x-speakeasy-group: "Jobs" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/JobCreateRequest" + examples: + Job Creation Request Example: + value: + connectionId: "e735894a-e773-4938-969f-45f53957b75b" + jobType: "sync" + required: true + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Creation Response Example: + value: + jobId: 1234 + status: "running" + jobType: "sync" + description: + "Kicks off a new Job based on the JobType. The connectionId\ + \ is the resource that Job will be run for." + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createJob" + summary: "Trigger a sync or reset job of a connection" + x-speakeasy-alias: "createJob" + x-speakeasy-group: "Jobs" + /jobs/{jobId}: + get: + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + examples: + Job Get Response Example: + value: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + description: "Get a Job by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getJob" + x-speakeasy-alias: "getJob" + x-speakeasy-group: "Jobs" + summary: "Get Job status and details" + delete: + tags: + - "public_jobs" + - "public" + - "Jobs" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/JobResponse" + description: "Cancel a Job." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "cancelJob" + x-speakeasy-alias: "cancelJob" + x-speakeasy-group: "Jobs" + summary: "Cancel a running Job" + parameters: + - name: "jobId" + schema: + format: "int64" + type: "integer" + in: "path" + required: true + /sources: + get: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourcesResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listSources" + x-speakeasy-alias: "listSources" + x-speakeasy-group: "Sources" + summary: "List sources" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list sources for. Empty\ + \ list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + example: "df08f6b0-b364-4cc1-9b3f-96f5d2fccfb2,b0796797-de23-4fc7-a5e2-7e131314718c" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted sources in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of sources returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning sources. 
The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCreateRequest" + examples: + Source Creation Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Creation Response Example: + value: + sourceId: "0c31738c-0b2d-4887-b506-e2cd1c39cc35" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSource" + x-speakeasy-alias: "createSource" + x-speakeasy-group: "Sources" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + /sources/{sourceId}: + get: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Get Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSource" + x-speakeasy-alias: "getSource" + x-speakeasy-group: "Sources" + summary: "Get Source details" + patch: + tags: + - "public_sources" + - "public" + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePatchRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a Source" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchSource" + x-speakeasy-alias: "patchSource" + x-speakeasy-group: "Sources" + summary: "Update a Source" + put: + tags: + - "public_sources" + - "public" + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePutRequest" + examples: + Source Update Request Example: + value: + configuration: + airbyte_source_name: "google-ads" + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Source" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + examples: + Source Update Response Example: + value: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a 
source and fully overwrite it" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSource" + x-speakeasy-alias: "putSource" + x-speakeasy-group: "Sources" + summary: "Update a Source and fully overwrite it" + x-speakeasy-entity-operation: Source#update + delete: + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSource" + x-speakeasy-alias: "deleteSource" + x-speakeasy-group: "Sources" + summary: "Delete a Source" + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations: + get: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationsResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listDestinations" + x-speakeasy-alias: "listDestinations" + x-speakeasy-group: "Destinations" + summary: "List destinations" + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list destinations for.\ + \ Empty list will retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted destinations in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of destinations returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning destinations. 
The\ + \ default is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationCreateRequest" + examples: + Destination Creation Request Example: + value: + name: "Postgres" + workspaceId: "2155ae5a-de39-4808-af6a-16fe7b8b4ed2" + configuration: + airbyte_destination_name: "postgres" + port: 5432 + schema: "public" + ssl_mode: + mode: "prefer" + tunnel_method: + tunnel_method: "NO_TUNNEL" + host: "localhost" + database: "postgres" + username: "postgres" + password: "test" + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Creation Response Example: + value: + destinationId: "af0c3c67-aa61-419f-8922-95b0bf840e86" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "createDestination" + x-speakeasy-alias: "createDestination" + x-speakeasy-group: "Destinations" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob\ + \ containing the configuration for the source." + /destinations/{destinationId}: + get: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Get Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "My Destination" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestination" + x-speakeasy-alias: "getDestination" + x-speakeasy-group: "Destinations" + summary: "Get Destination details" + delete: + tags: + - "public_destinations" + - "public" + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestination" + x-speakeasy-alias: "deleteDestination" + x-speakeasy-group: "Destinations" + summary: "Delete a Destination" + patch: + tags: + - "public_destinations" + - "public" + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPatchRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Destination" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Update a Destination" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchDestination" + x-speakeasy-alias: "patchDestination" + x-speakeasy-group: "Destinations" + summary: "Update a Destination" + put: + tags: + - "public_destinations" + - "public" + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPutRequest" + examples: + Destination Update Request Example: + value: + configuration: + conversion_window_days: 14 + 
customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + name: "My Destination" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + examples: + Destination Update Response Example: + value: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "running" + sourceType: "postgres" + workspaceId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + configuration: + conversion_window_days: 14 + customer_id: "1234567890" + start_date: 1672531200000 + end_date: 1704067200000 + description: "Update a Destination and fully overwrite it" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestination" + x-speakeasy-alias: "putDestination" + x-speakeasy-group: "Destinations" + summary: "Update a Destination and fully overwrite it" + x-speakeasy-entity-operation: Destination#update + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources/initiateOAuth: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/InitiateOauthRequest" + required: true + tags: + - "public_sources" + - "public" + - "Sources" + responses: + "200": + $ref: "#/components/responses/InitiateOauthResponse" + "400": + description: "A field in the body has not been set appropriately." + "403": + description: "API key is invalid." + operationId: "initiateOAuth" + x-speakeasy-alias: "initiateOAuth" + x-speakeasy-group: "Sources" + summary: "Initiate OAuth for a source" + description: + "Given a source ID, workspace ID, and redirect URL, initiates OAuth\ + \ for the source.\n\nThis returns a fully formed URL for performing user authentication\ + \ against the relevant source identity provider (IdP). 
Once authentication\ + \ has been completed, the IdP will redirect to an Airbyte endpoint which will\ + \ save the access and refresh tokens off as a secret and return the secret\ + \ ID to the redirect URL specified in the `secret_id` query string parameter.\n\ + \nThat secret ID can be used to create a source with credentials in place\ + \ of actual tokens." + /connections: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionCreateRequest" + examples: + Connection Creation Request Example: + value: + sourceId: "95e66a59-8045-4307-9678-63bc3c9b8c93" + destinationId: "e478de0d-a3a0-475c-b019-25f7dd29e281" + name: "Postgres-to-Bigquery" + required: true + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Creation Response Example: + value: + connectionId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createConnection" + x-speakeasy-alias: "createConnection" + x-speakeasy-group: "Connections" + summary: "Create a connection" + x-speakeasy-entity-operation: Connection#create + get: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionsResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listConnections" + x-speakeasy-alias: "listConnections" + x-speakeasy-group: "Connections" + summary: "List connections" + x-speakeasy-entity-operation: Connection#list + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to list connections for.\ + \ Empty list will retrieve all allowed workspaces." 
+ schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted connections in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of Connections returned. The default\ + \ is 20." + schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning Connections. The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + /connections/{connectionId}: + get: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Postgres To Snowflake" + sourceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + destinationId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Get a Connection by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getConnection" + x-speakeasy-alias: "getConnection" + x-speakeasy-group: "Connections" + summary: "Get Connection details" + x-speakeasy-entity-operation: Connection#read + patch: + tags: + - "public_connections" + - "public" + - "Connections" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionPatchRequest" + examples: + Connection Update Request Example: + value: + sourceId: "95e66a59-8045-4307-9678-63bc3c9b8c93" + destinationId: "e478de0d-a3a0-475c-b019-25f7dd29e281" + name: "Postgres-to-Bigquery" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/ConnectionResponse" + examples: + Connection Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Postgres To Snowflake" + sourceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + destinationId: "744cc0ed-7f05-4949-9e60-2a814f90c035" + description: "Update a Connection by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "patchConnection" + x-speakeasy-alias: "patchConnection" + x-speakeasy-group: "Connections" + summary: "Update Connection details" + x-speakeasy-entity-operation: Connection#update + delete: + tags: + - "public_connections" + - "public" + - "Connections" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteConnection" + x-speakeasy-alias: "deleteConnection" + x-speakeasy-group: "Connections" + summary: "Delete a Connection" + x-speakeasy-entity-operation: Connection#delete + parameters: + - name: "connectionId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /streams: + get: + tags: + - "public_streams" + - "public" + - "Streams" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/StreamPropertiesResponse" + description: + "Get the available streams properties for a source/destination\ + \ pair." 
+ "400": + description: "Required parameters are missing" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getStreamProperties" + x-speakeasy-alias: "getStreamProperties" + x-speakeasy-group: "Streams" + summary: "Get stream properties" + parameters: + - name: "sourceId" + description: "ID of the source" + schema: + format: "UUID" + type: "string" + in: "query" + required: true + - name: "destinationId" + description: "ID of the destination" + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "ignoreCache" + description: + "If true pull the latest schema from the source, else pull from\ + \ cache (default false)" + schema: + type: "boolean" + default: false + in: "query" + required: false + /workspaces: + get: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspacesResponse" + description: "Successful operation" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listWorkspaces" + x-speakeasy-alias: "listWorkspaces" + x-speakeasy-group: "Workspaces" + summary: "List workspaces" + x-speakeasy-entity-operation: Workspace#list + parameters: + - name: "workspaceIds" + description: + "The UUIDs of the workspaces you wish to fetch. Empty list will\ + \ retrieve all allowed workspaces." + schema: + type: "array" + items: + format: "uuid" + type: "string" + in: "query" + required: false + - name: "includeDeleted" + description: "Include deleted workspaces in the returned results." + schema: + default: false + type: "boolean" + in: "query" + required: false + - name: "limit" + description: + "Set the limit on the number of workspaces returned. The default\ + \ is 20." 
+ schema: + format: "int32" + type: "integer" + minimum: 1 + maximum: 100 + default: 20 + in: "query" + - name: "offset" + description: + "Set the offset to start at when returning workspaces. The default\ + \ is 0" + schema: + type: "integer" + format: "int32" + minimum: 0 + default: 0 + in: "query" + post: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceCreateRequest" + examples: + Workspace Creation Request Example: + value: + name: "Company Workspace Name" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Creation Response Example: + value: + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createWorkspace" + x-speakeasy-alias: "createWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Create a workspace" + x-speakeasy-entity-operation: Workspace#create + /workspaces/{workspaceId}: + parameters: + - name: "workspaceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + get: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Get Response Example: + value: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + description: "Get a Workspace by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getWorkspace" + x-speakeasy-alias: "getWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Get Workspace details" + x-speakeasy-entity-operation: Workspace#read + patch: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceUpdateRequest" + examples: + Workspace Update Request Example: + value: + name: "Company Workspace Name" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceResponse" + examples: + Workspace Update Response Example: + value: + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "updateWorkspace" + x-speakeasy-alias: "updateWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Update a workspace" + x-speakeasy-entity-operation: Workspace#update + delete: + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteWorkspace" + x-speakeasy-alias: "deleteWorkspace" + x-speakeasy-group: "Workspaces" + summary: "Delete a Workspace" + x-speakeasy-entity-operation: Workspace#delete + /workspaces/{workspaceId}/oauthCredentials: + put: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/WorkspaceOAuthCredentialsRequest" + required: true + tags: + - "public_workspaces" + - "public" + - "Workspaces" + responses: + "200": + description: "OAuth credential override was successful." + "400": + description: "A field in the body has not been set appropriately." + "403": + description: "API key is invalid." 
+ operationId: "createOrUpdateWorkspaceOAuthCredentials" + x-speakeasy-alias: "createOrUpdateWorkspaceOAuthCredentials" + x-speakeasy-group: "Workspaces" + summary: "Create OAuth override credentials for a workspace and source type." + description: + "Create/update a set of OAuth credentials to override the Airbyte-provided\ + \ OAuth credentials used for source/destination OAuth.\nIn order to determine\ + \ what the credential configuration needs to be, please see the connector\ + \ specification of the relevant source/destination." + parameters: + - name: "workspaceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /permissions/{permissionId}: + parameters: + - name: "permissionId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + get: + tags: + - "public_permissions" + - "public" + - "Permissions" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: "Get a Permission by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "getPermission" + summary: "Get Permission details" + x-speakeasy-alias: "getPermission" + x-speakeasy-group: "Permissions" + x-speakeasy-entity-operation: Permission#read + patch: + tags: + - "public_permissions" + - "public" + - "Permissions" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionUpdateRequest" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + description: "Successful updated" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "updatePermission" + summary: "Update a permission" + x-speakeasy-alias: "updatePermission" + x-speakeasy-group: "Permissions" + x-speakeasy-entity-operation: Permission#update + delete: + tags: + - "public_permissions" + - "public" + - "Permissions" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + "422": + description: "Data issue" + operationId: "deletePermission" + x-speakeasy-alias: "deletePermission" + x-speakeasy-group: "Permissions" + summary: "Delete a Permission" + x-speakeasy-entity-operation: Permission#delete + /permissions: + get: + tags: + - "public_permissions" + - "public" + - "Permissions" + parameters: + - name: "userId" + description: "User Id in permission." + schema: + format: "UUID" + type: "string" + in: "query" + required: false + - name: "organizationId" + description: + "This is required if you want to read someone else's permissions,\ + \ and you should have organization admin or a higher role." 
+ schema: + format: "UUID" + type: "string" + in: "query" + required: false + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionsResponse" + description: "List Permissions." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listPermissions" + x-speakeasy-alias: "listPermissions" + x-speakeasy-group: "Permissions" + summary: "List Permissions by user id" + post: + tags: + - "public_permissions" + - "public" + - "Permissions" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionCreateRequest" + examples: + Permission Creation Request Example: + value: + permissionType: "workspace_admin" + userId: "7d08fd6c-531e-4a00-937e-3d355f253e63" + workspaceId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + required: true + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/PermissionResponse" + examples: + Permission Creation Response Example: + value: + permissionId: "9924bcd0-99be-453d-ba47-c2c9766f7da5" + permissionType: "workspace_admin" + userId: "7d08fd6c-531e-4a00-937e-3d355f253e63" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createPermission" + x-speakeasy-alias: "createPermission" + x-speakeasy-group: "Permissions" + summary: "Create a permission" + x-speakeasy-entity-operation: Permission#create + /organizations: + get: + tags: + - "public_organizations" + - "public" + - "Organizations" + summary: "List all organizations for a user" + description: "Lists the user's organizations." + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/OrganizationsResponse" + description: "List user's organizations."
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listOrganizationsForUser" + x-speakeasy-alias: "listOrganizationsForUser" + x-speakeasy-group: "Organizations" + /users: + get: + tags: + - "public_users" + - "public" + - "Users" + summary: "List all users within an organization" + description: + "Organization Admin user can list all users within the same organization.\ + \ Also provide filtering on a list of user IDs or/and a list of user emails." + parameters: + - in: "query" + name: "organizationId" + schema: + type: "string" + format: "UUID" + required: true + - in: "query" + name: "ids" + schema: + type: "array" + items: + type: "string" + format: "UUID" + description: "List of user IDs to filter by" + - in: "query" + name: "emails" + schema: + type: "array" + items: + type: "string" + format: "email" + description: "List of user emails to filter by" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/UsersResponse" + description: "List Users." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "listUsersWithinAnOrganization" + x-speakeasy-alias: "listUsersWithinAnOrganization" + x-speakeasy-group: "Users" + /sources#7shifts: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Source7shiftsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSource7shifts" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_7shifts#create + /sources/{sourceId}#7shifts: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSource7shifts" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_7shifts#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/Source7shiftsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSource7shifts" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_7shifts#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSource7shifts" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_7shifts#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Aha: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAhaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAha" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the 
source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aha#create + /sources/{sourceId}#Aha: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAha" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aha#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAhaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAha" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aha#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAha" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aha#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Airbyte: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAirbyteCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAirbyte" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airbyte#create + /sources/{sourceId}#Airbyte: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAirbyte" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airbyte#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAirbytePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAirbyte" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airbyte#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAirbyte" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airbyte#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Aircall: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAircallCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAircall" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the 
configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aircall#create + /sources/{sourceId}#Aircall: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAircall" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aircall#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAircallPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAircall" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aircall#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAircall" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Aircall#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Airtable: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAirtableCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAirtable" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json 
blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airtable#create + /sources/{sourceId}#Airtable: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAirtable" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airtable#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAirtablePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAirtable" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airtable#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAirtable" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Airtable#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Algolia: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAlgoliaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAlgolia" + summary: "Create a source" + description: + "Creates a source given a 
name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Algolia#create + /sources/{sourceId}#Algolia: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAlgolia" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Algolia#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAlgoliaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAlgolia" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Algolia#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAlgolia" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Algolia#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#AmazonAds: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmazonAdsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAmazonAds" + summary: "Create a source" + description: + 
"Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonAds#create + /sources/{sourceId}#AmazonAds: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAmazonAds" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonAds#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmazonAdsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAmazonAds" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonAds#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAmazonAds" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonAds#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#AmazonSellerPartner: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmazonSellerPartnerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceAmazonSellerPartner" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSellerPartner#create + /sources/{sourceId}#AmazonSellerPartner: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAmazonSellerPartner" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSellerPartner#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmazonSellerPartnerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAmazonSellerPartner" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSellerPartner#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAmazonSellerPartner" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSellerPartner#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#AmazonSqs: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmazonSqsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAmazonSqs" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSqs#create + /sources/{sourceId}#AmazonSqs: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAmazonSqs" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSqs#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmazonSqsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAmazonSqs" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSqs#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAmazonSqs" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AmazonSqs#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Amplitude: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmplitudeCreateRequest" + tags: + - "Sources" + responses: + "200": + 
content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAmplitude" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Amplitude#create + /sources/{sourceId}#Amplitude: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAmplitude" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Amplitude#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAmplitudePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAmplitude" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Amplitude#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAmplitude" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Amplitude#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ApifyDataset: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceApifyDatasetCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceApifyDataset" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ApifyDataset#create + /sources/{sourceId}#ApifyDataset: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceApifyDataset" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ApifyDataset#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceApifyDatasetPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceApifyDataset" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ApifyDataset#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceApifyDataset" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ApifyDataset#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/sources#Appcues: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAppcuesCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAppcues" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appcues#create + /sources/{sourceId}#Appcues: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAppcues" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appcues#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAppcuesPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAppcues" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appcues#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAppcues" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appcues#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" 
+ required: true + /sources#Appfigures: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAppfiguresCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAppfigures" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfigures#create + /sources/{sourceId}#Appfigures: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAppfigures" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfigures#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAppfiguresPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAppfigures" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfigures#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAppfigures" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfigures#delete + parameters: + - name: "sourceId" + 
schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Appfollow: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAppfollowCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAppfollow" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfollow#create + /sources/{sourceId}#Appfollow: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAppfollow" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfollow#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAppfollowPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAppfollow" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Appfollow#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAppfollow" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_Appfollow#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Asana: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAsanaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAsana" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Asana#create + /sources/{sourceId}#Asana: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAsana" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Asana#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAsanaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAsana" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Asana#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAsana" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Asana#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Auth0: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAuth0CreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAuth0" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Auth0#create + /sources/{sourceId}#Auth0: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAuth0" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Auth0#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAuth0PutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAuth0" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Auth0#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAuth0" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Auth0#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#AwsCloudtrail: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAwsCloudtrailCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAwsCloudtrail" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AwsCloudtrail#create + /sources/{sourceId}#AwsCloudtrail: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAwsCloudtrail" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AwsCloudtrail#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAwsCloudtrailPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAwsCloudtrail" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AwsCloudtrail#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAwsCloudtrail" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AwsCloudtrail#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#AzureBlobStorage: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAzureBlobStorageCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceAzureBlobStorage" + summary: "Create a source" + description: + "Creates a 
source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureBlobStorage#create + /sources/{sourceId}#AzureBlobStorage: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAzureBlobStorage" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureBlobStorage#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAzureBlobStoragePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAzureBlobStorage" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureBlobStorage#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAzureBlobStorage" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureBlobStorage#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#AzureTable: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAzureTableCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: 
"Not allowed" + operationId: "createSourceAzureTable" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureTable#create + /sources/{sourceId}#AzureTable: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceAzureTable" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureTable#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceAzureTablePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceAzureTable" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureTable#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceAzureTable" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_AzureTable#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#BambooHr: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBambooHrCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + 
description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBambooHr" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BambooHr#create + /sources/{sourceId}#BambooHr: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBambooHr" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BambooHr#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBambooHrPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBambooHr" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BambooHr#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBambooHr" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BambooHr#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Basecamp: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBasecampCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBasecamp" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Basecamp#create + /sources/{sourceId}#Basecamp: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBasecamp" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Basecamp#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBasecampPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBasecamp" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Basecamp#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBasecamp" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Basecamp#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Beamer: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBeamerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBeamer" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Beamer#create + /sources/{sourceId}#Beamer: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBeamer" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Beamer#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBeamerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBeamer" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Beamer#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBeamer" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Beamer#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Bigquery: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBigqueryCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + 
schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBigquery" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigquery#create + /sources/{sourceId}#Bigquery: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBigquery" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigquery#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBigqueryPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBigquery" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigquery#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBigquery" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bigquery#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#BingAds: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBingAdsCreateRequest" + tags: + - "Sources" + responses: + "200": + 
content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBingAds" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BingAds#create + /sources/{sourceId}#BingAds: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBingAds" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BingAds#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBingAdsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBingAds" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BingAds#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBingAds" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BingAds#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Bitly: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBitlyCreateRequest" + tags: + - "Sources" + 
responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBitly" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bitly#create + /sources/{sourceId}#Bitly: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBitly" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bitly#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBitlyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBitly" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bitly#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBitly" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Bitly#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Braintree: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBraintreeCreateRequest" + tags: + - "Sources" 
+ responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBraintree" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braintree#create + /sources/{sourceId}#Braintree: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBraintree" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braintree#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBraintreePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBraintree" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braintree#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBraintree" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braintree#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Braze: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceBrazeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBraze" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braze#create + /sources/{sourceId}#Braze: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBraze" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braze#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBrazePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBraze" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braze#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBraze" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Braze#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#BreezyHr: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceBreezyHrCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBreezyHr" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BreezyHr#create + /sources/{sourceId}#BreezyHr: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBreezyHr" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BreezyHr#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBreezyHrPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBreezyHr" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BreezyHr#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBreezyHr" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_BreezyHr#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Brevo: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceBrevoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBrevo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brevo#create + /sources/{sourceId}#Brevo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBrevo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brevo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBrevoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBrevo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brevo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBrevo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Brevo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Buildkite: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceBuildkiteCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBuildkite" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buildkite#create + /sources/{sourceId}#Buildkite: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBuildkite" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buildkite#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBuildkitePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBuildkite" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buildkite#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBuildkite" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buildkite#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/sources#Buzzsprout: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBuzzsproutCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceBuzzsprout" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buzzsprout#create + /sources/{sourceId}#Buzzsprout: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceBuzzsprout" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buzzsprout#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceBuzzsproutPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceBuzzsprout" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buzzsprout#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceBuzzsprout" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Buzzsprout#delete + parameters: + - name: "sourceId" + schema: + format: 
"UUID" + type: "string" + in: "path" + required: true + /sources#Calendly: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCalendlyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCalendly" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Calendly#create + /sources/{sourceId}#Calendly: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCalendly" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Calendly#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCalendlyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCalendly" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Calendly#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCalendly" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Calendly#delete + parameters: + - name: 
"sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Canny: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCannyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCanny" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Canny#create + /sources/{sourceId}#Canny: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCanny" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Canny#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCannyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCanny" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Canny#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCanny" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Canny#delete + parameters: + - name: 
"sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Cart: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCartCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCart" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cart#create + /sources/{sourceId}#Cart: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCart" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cart#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCartPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCart" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cart#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCart" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cart#delete + parameters: + - name: "sourceId" + 
schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Chameleon: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceChameleonCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceChameleon" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chameleon#create + /sources/{sourceId}#Chameleon: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceChameleon" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chameleon#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceChameleonPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceChameleon" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chameleon#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceChameleon" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_Chameleon#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Chargebee: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceChargebeeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceChargebee" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chargebee#create + /sources/{sourceId}#Chargebee: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceChargebee" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chargebee#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceChargebeePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceChargebee" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chargebee#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceChargebee" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chargebee#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Chartmogul: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceChartmogulCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceChartmogul" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chartmogul#create + /sources/{sourceId}#Chartmogul: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceChartmogul" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chartmogul#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceChartmogulPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceChartmogul" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chartmogul#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceChartmogul" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Chartmogul#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Cimis: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCimisCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCimis" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ 
+ \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cimis#create + /sources/{sourceId}#Cimis: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCimis" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cimis#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCimisPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCimis" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cimis#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCimis" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Cimis#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Clazar: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClazarCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceClazar" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + 
\ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clazar#create + /sources/{sourceId}#Clazar: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceClazar" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clazar#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClazarPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceClazar" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clazar#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceClazar" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clazar#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Clickhouse: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClickhouseCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceClickhouse" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json 
blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clickhouse#create + /sources/{sourceId}#Clickhouse: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceClickhouse" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clickhouse#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClickhousePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceClickhouse" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clickhouse#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceClickhouse" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clickhouse#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ClickupApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClickupApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceClickupApi" + summary: "Create a source" + description: + 
"Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ClickupApi#create + /sources/{sourceId}#ClickupApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceClickupApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ClickupApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClickupApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceClickupApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ClickupApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceClickupApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ClickupApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Clockify: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClockifyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceClockify" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockify#create + /sources/{sourceId}#Clockify: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceClockify" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockify#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceClockifyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceClockify" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockify#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceClockify" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Clockify#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#CloseCom: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCloseComCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + 
description: "Not allowed" + operationId: "createSourceCloseCom" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CloseCom#create + /sources/{sourceId}#CloseCom: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCloseCom" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CloseCom#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCloseComPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCloseCom" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CloseCom#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCloseCom" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CloseCom#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Coda: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCodaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: 
"Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCoda" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coda#create + /sources/{sourceId}#Coda: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCoda" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coda#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCodaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCoda" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coda#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCoda" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coda#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#CoinApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCoinApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid 
data" + "403": + description: "Not allowed" + operationId: "createSourceCoinApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CoinApi#create + /sources/{sourceId}#CoinApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCoinApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CoinApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCoinApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCoinApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CoinApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCoinApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CoinApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Coinmarketcap: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCoinmarketcapCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + 
"400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCoinmarketcap" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coinmarketcap#create + /sources/{sourceId}#Coinmarketcap: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCoinmarketcap" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coinmarketcap#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCoinmarketcapPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCoinmarketcap" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coinmarketcap#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCoinmarketcap" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Coinmarketcap#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Configcat: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceConfigcatCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + 
$ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceConfigcat" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Configcat#create + /sources/{sourceId}#Configcat: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceConfigcat" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Configcat#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceConfigcatPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceConfigcat" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Configcat#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceConfigcat" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Configcat#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Confluence: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceConfluenceCreateRequest" + tags: + - "Sources" + responses: + 
"200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceConfluence" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Confluence#create + /sources/{sourceId}#Confluence: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceConfluence" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Confluence#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceConfluencePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceConfluence" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Confluence#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceConfluence" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Confluence#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Convex: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceConvexCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceConvex" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Convex#create + /sources/{sourceId}#Convex: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceConvex" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Convex#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceConvexPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceConvex" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Convex#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceConvex" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Convex#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#CustomerIo: + post: + requestBody: + content: + application/json: + 
schema: + $ref: "#/components/schemas/SourceCustomerIoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCustomerIo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CustomerIo#create + /sources/{sourceId}#CustomerIo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCustomerIo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CustomerIo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCustomerIoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCustomerIo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CustomerIo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCustomerIo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_CustomerIo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Datadog: + 
post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDatadogCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDatadog" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datadog#create + /sources/{sourceId}#Datadog: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDatadog" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datadog#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDatadogPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDatadog" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datadog#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDatadog" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datadog#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/sources#Datascope: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDatascopeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDatascope" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datascope#create + /sources/{sourceId}#Datascope: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDatascope" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datascope#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDatascopePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDatascope" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datascope#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDatascope" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Datascope#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + 
type: "string" + in: "path" + required: true + /sources#Dbt: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDbtCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDbt" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dbt#create + /sources/{sourceId}#Dbt: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDbt" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dbt#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDbtPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDbt" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dbt#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDbt" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dbt#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + 
required: true + /sources#Delighted: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDelightedCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDelighted" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Delighted#create + /sources/{sourceId}#Delighted: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDelighted" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Delighted#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDelightedPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDelighted" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Delighted#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDelighted" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Delighted#delete + parameters: + - name: "sourceId" + schema: + 
format: "UUID" + type: "string" + in: "path" + required: true + /sources#Dixa: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDixaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDixa" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dixa#create + /sources/{sourceId}#Dixa: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDixa" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dixa#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDixaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDixa" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dixa#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDixa" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dixa#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + 
type: "string" + in: "path" + required: true + /sources#Dockerhub: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDockerhubCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDockerhub" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dockerhub#create + /sources/{sourceId}#Dockerhub: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDockerhub" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dockerhub#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDockerhubPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDockerhub" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dockerhub#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDockerhub" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dockerhub#delete + parameters: + - 
name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Dremio: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDremioCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDremio" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dremio#create + /sources/{sourceId}#Dremio: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDremio" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dremio#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDremioPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDremio" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dremio#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDremio" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dremio#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#DropboxSign: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDropboxSignCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDropboxSign" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_DropboxSign#create + /sources/{sourceId}#DropboxSign: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDropboxSign" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_DropboxSign#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDropboxSignPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDropboxSign" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_DropboxSign#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDropboxSign" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_DropboxSign#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Dynamodb: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDynamodbCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceDynamodb" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dynamodb#create + /sources/{sourceId}#Dynamodb: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceDynamodb" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dynamodb#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceDynamodbPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceDynamodb" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dynamodb#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceDynamodb" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Dynamodb#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Emailoctopus: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEmailoctopusCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEmailoctopus" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Emailoctopus#create + /sources/{sourceId}#Emailoctopus: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEmailoctopus" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Emailoctopus#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEmailoctopusPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEmailoctopus" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Emailoctopus#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEmailoctopus" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Emailoctopus#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Eventbrite: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEventbriteCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEventbrite" + summary: "Create a source" + 
description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventbrite#create + /sources/{sourceId}#Eventbrite: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEventbrite" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventbrite#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEventbritePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEventbrite" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventbrite#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEventbrite" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Eventbrite#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ExchangeRates: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceExchangeRatesCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + 
operationId: "createSourceExchangeRates" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ExchangeRates#create + /sources/{sourceId}#ExchangeRates: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceExchangeRates" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ExchangeRates#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceExchangeRatesPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceExchangeRates" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ExchangeRates#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceExchangeRates" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ExchangeRates#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Ezofficeinventory: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEzofficeinventoryCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceEzofficeinventory" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ezofficeinventory#create + /sources/{sourceId}#Ezofficeinventory: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceEzofficeinventory" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ezofficeinventory#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceEzofficeinventoryPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceEzofficeinventory" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ezofficeinventory#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceEzofficeinventory" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ezofficeinventory#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#FacebookMarketing: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFacebookMarketingCreateRequest" + tags: 
+ - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFacebookMarketing" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_FacebookMarketing#create + /sources/{sourceId}#FacebookMarketing: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFacebookMarketing" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_FacebookMarketing#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFacebookMarketingPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFacebookMarketing" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_FacebookMarketing#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFacebookMarketing" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_FacebookMarketing#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Faker: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFakerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFaker" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Faker#create + /sources/{sourceId}#Faker: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFaker" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Faker#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFakerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFaker" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Faker#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFaker" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Faker#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Fauna: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFaunaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFauna" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fauna#create + /sources/{sourceId}#Fauna: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFauna" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fauna#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFaunaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFauna" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fauna#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFauna" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fauna#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#File: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFileCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFile" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_File#create + /sources/{sourceId}#File: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFile" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_File#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFilePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFile" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_File#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFile" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_File#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Firebolt: + post: + requestBody: 
+ content: + application/json: + schema: + $ref: "#/components/schemas/SourceFireboltCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFirebolt" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Firebolt#create + /sources/{sourceId}#Firebolt: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFirebolt" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Firebolt#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFireboltPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFirebolt" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Firebolt#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFirebolt" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Firebolt#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/sources#Fleetio: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFleetioCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFleetio" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fleetio#create + /sources/{sourceId}#Fleetio: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFleetio" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fleetio#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFleetioPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFleetio" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fleetio#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFleetio" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Fleetio#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" 
+ required: true + /sources#Freshcaller: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshcallerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFreshcaller" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshcaller#create + /sources/{sourceId}#Freshcaller: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFreshcaller" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshcaller#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshcallerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFreshcaller" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshcaller#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFreshcaller" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshcaller#delete + parameters: + - name: 
"sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Freshchat: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshchatCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFreshchat" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshchat#create + /sources/{sourceId}#Freshchat: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFreshchat" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshchat#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshchatPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFreshchat" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshchat#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFreshchat" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_Freshchat#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Freshdesk: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshdeskCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFreshdesk" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshdesk#create + /sources/{sourceId}#Freshdesk: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFreshdesk" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshdesk#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshdeskPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFreshdesk" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshdesk#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFreshdesk" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshdesk#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Freshsales: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshsalesCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFreshsales" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshsales#create + /sources/{sourceId}#Freshsales: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFreshsales" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshsales#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFreshsalesPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFreshsales" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshsales#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFreshsales" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Freshsales#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Front: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFrontCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceFront" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ 
+ \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Front#create + /sources/{sourceId}#Front: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceFront" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Front#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceFrontPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceFront" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Front#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceFront" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Front#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GainsightPx: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGainsightPxCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGainsightPx" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GainsightPx#create + /sources/{sourceId}#GainsightPx: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGainsightPx" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GainsightPx#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGainsightPxPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGainsightPx" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GainsightPx#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGainsightPx" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GainsightPx#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Gcs: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGcsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGcs" + summary: "Create a source" + description: + "Creates a source 
given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gcs#create + /sources/{sourceId}#Gcs: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGcs" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gcs#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGcsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGcs" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gcs#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGcs" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gcs#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Getlago: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGetlagoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGetlago" + summary: "Create a source" + description: + "Creates a source given a name, 
workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Getlago#create + /sources/{sourceId}#Getlago: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGetlago" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Getlago#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGetlagoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGetlago" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Getlago#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGetlago" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Getlago#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Github: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGithubCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGithub" + summary: "Create a source" + description: + "Creates a 
source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Github#create + /sources/{sourceId}#Github: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGithub" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Github#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGithubPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGithub" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Github#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGithub" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Github#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Gitlab: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGitlabCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGitlab" + summary: "Create a source" + description: + 
"Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gitlab#create + /sources/{sourceId}#Gitlab: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGitlab" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gitlab#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGitlabPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGitlab" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gitlab#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGitlab" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gitlab#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Glassfrog: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGlassfrogCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGlassfrog" + summary: "Create a source" + 
description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Glassfrog#create + /sources/{sourceId}#Glassfrog: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGlassfrog" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Glassfrog#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGlassfrogPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGlassfrog" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Glassfrog#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGlassfrog" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Glassfrog#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Gnews: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGnewsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGnews" 
+ summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gnews#create + /sources/{sourceId}#Gnews: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGnews" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gnews#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGnewsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGnews" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gnews#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGnews" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gnews#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Goldcast: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoldcastCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceGoldcast" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Goldcast#create + /sources/{sourceId}#Goldcast: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoldcast" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Goldcast#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoldcastPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoldcast" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Goldcast#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoldcast" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Goldcast#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleAds: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleAdsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + 
description: "Not allowed" + operationId: "createSourceGoogleAds" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAds#create + /sources/{sourceId}#GoogleAds: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleAds" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAds#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleAdsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleAds" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAds#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleAds" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAds#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleAnalyticsDataApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleAnalyticsDataApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGoogleAnalyticsDataApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#create + /sources/{sourceId}#GoogleAnalyticsDataApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleAnalyticsDataApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleAnalyticsDataApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleAnalyticsDataApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleAnalyticsDataApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleAnalyticsDataApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleDirectory: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceGoogleDirectoryCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGoogleDirectory" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDirectory#create + /sources/{sourceId}#GoogleDirectory: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleDirectory" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDirectory#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleDirectoryPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleDirectory" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDirectory#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleDirectory" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDirectory#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + 
required: true + /sources#GoogleDrive: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleDriveCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGoogleDrive" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDrive#create + /sources/{sourceId}#GoogleDrive: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleDrive" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDrive#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleDrivePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleDrive" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDrive#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleDrive" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleDrive#delete + parameters: + - name: 
"sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GooglePagespeedInsights: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGooglePagespeedInsightsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGooglePagespeedInsights" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#create + /sources/{sourceId}#GooglePagespeedInsights: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGooglePagespeedInsights" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGooglePagespeedInsightsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGooglePagespeedInsights" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGooglePagespeedInsights" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GooglePagespeedInsights#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleSearchConsole: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleSearchConsoleCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGoogleSearchConsole" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSearchConsole#create + /sources/{sourceId}#GoogleSearchConsole: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleSearchConsole" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSearchConsole#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleSearchConsolePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleSearchConsole" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSearchConsole#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleSearchConsole" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSearchConsole#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleSheets: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleSheetsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGoogleSheets" + summary: 
"Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSheets#create + /sources/{sourceId}#GoogleSheets: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleSheets" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSheets#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleSheetsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleSheets" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSheets#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleSheets" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleSheets#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleTasks: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleTasksCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + 
description: "Not allowed" + operationId: "createSourceGoogleTasks" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleTasks#create + /sources/{sourceId}#GoogleTasks: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleTasks" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleTasks#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleTasksPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleTasks" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleTasks#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleTasks" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleTasks#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#GoogleWebfonts: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleWebfontsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGoogleWebfonts" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleWebfonts#create + /sources/{sourceId}#GoogleWebfonts: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGoogleWebfonts" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleWebfonts#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGoogleWebfontsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGoogleWebfonts" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleWebfonts#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGoogleWebfonts" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_GoogleWebfonts#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Greenhouse: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGreenhouseCreateRequest" + tags: + - "Sources" + responses: + "200": + 
content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGreenhouse" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Greenhouse#create + /sources/{sourceId}#Greenhouse: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGreenhouse" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Greenhouse#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGreenhousePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGreenhouse" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Greenhouse#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGreenhouse" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Greenhouse#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Gridly: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGridlyCreateRequest" 
+ tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGridly" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gridly#create + /sources/{sourceId}#Gridly: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGridly" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gridly#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGridlyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGridly" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gridly#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGridly" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Gridly#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Guru: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceGuruCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceGuru" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Guru#create + /sources/{sourceId}#Guru: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceGuru" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Guru#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceGuruPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceGuru" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Guru#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceGuru" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Guru#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#HardcodedRecords: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceHardcodedRecordsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHardcodedRecords" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HardcodedRecords#create + /sources/{sourceId}#HardcodedRecords: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHardcodedRecords" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HardcodedRecords#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHardcodedRecordsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHardcodedRecords" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HardcodedRecords#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHardcodedRecords" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HardcodedRecords#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + 
in: "path" + required: true + /sources#Harvest: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHarvestCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHarvest" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Harvest#create + /sources/{sourceId}#Harvest: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHarvest" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Harvest#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHarvestPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHarvest" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Harvest#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHarvest" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Harvest#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" 
+ type: "string" + in: "path" + required: true + /sources#Height: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHeightCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHeight" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Height#create + /sources/{sourceId}#Height: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHeight" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Height#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHeightPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHeight" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Height#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHeight" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Height#delete + parameters: + - name: "sourceId" + schema: + format: 
"UUID" + type: "string" + in: "path" + required: true + /sources#Hibob: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHibobCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHibob" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hibob#create + /sources/{sourceId}#Hibob: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHibob" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hibob#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHibobPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHibob" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hibob#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHibob" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hibob#delete + parameters: + - name: "sourceId" + schema: + format: 
"UUID" + type: "string" + in: "path" + required: true + /sources#HighLevel: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHighLevelCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHighLevel" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HighLevel#create + /sources/{sourceId}#HighLevel: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHighLevel" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HighLevel#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHighLevelPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHighLevel" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HighLevel#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHighLevel" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_HighLevel#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Hubplanner: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHubplannerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHubplanner" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubplanner#create + /sources/{sourceId}#Hubplanner: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHubplanner" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubplanner#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHubplannerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHubplanner" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubplanner#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHubplanner" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubplanner#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Hubspot: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHubspotCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceHubspot" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubspot#create + /sources/{sourceId}#Hubspot: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceHubspot" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubspot#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceHubspotPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceHubspot" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubspot#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceHubspot" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Hubspot#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Insightly: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceInsightlyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceInsightly" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the 
configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Insightly#create + /sources/{sourceId}#Insightly: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceInsightly" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Insightly#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceInsightlyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceInsightly" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Insightly#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceInsightly" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Insightly#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Instagram: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceInstagramCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceInstagram" + summary: "Create a source" + description: + "Creates a source given a name, 
workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instagram#create + /sources/{sourceId}#Instagram: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceInstagram" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instagram#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceInstagramPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceInstagram" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instagram#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceInstagram" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instagram#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Instatus: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceInstatusCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceInstatus" + summary: "Create a source" + 
description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instatus#create + /sources/{sourceId}#Instatus: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceInstatus" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instatus#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceInstatusPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceInstatus" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instatus#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceInstatus" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Instatus#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Intercom: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceIntercomCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceIntercom" 
+ summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Intercom#create + /sources/{sourceId}#Intercom: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceIntercom" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Intercom#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceIntercomPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceIntercom" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Intercom#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceIntercom" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Intercom#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Ip2whois: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceIp2whoisCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + 
operationId: "createSourceIp2whois" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ip2whois#create + /sources/{sourceId}#Ip2whois: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceIp2whois" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ip2whois#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceIp2whoisPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceIp2whois" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ip2whois#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceIp2whois" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Ip2whois#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Iterable: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceIterableCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": 
+ description: "Not allowed" + operationId: "createSourceIterable" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Iterable#create + /sources/{sourceId}#Iterable: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceIterable" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Iterable#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceIterablePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceIterable" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Iterable#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceIterable" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Iterable#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Jira: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceJiraCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: 
"Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceJira" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jira#create + /sources/{sourceId}#Jira: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceJira" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jira#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceJiraPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceJira" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jira#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceJira" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jira#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Jotform: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceJotformCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid 
data" + "403": + description: "Not allowed" + operationId: "createSourceJotform" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jotform#create + /sources/{sourceId}#Jotform: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceJotform" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jotform#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceJotformPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceJotform" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jotform#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceJotform" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Jotform#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#K6Cloud: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceK6CloudCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + 
description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceK6Cloud" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_K6Cloud#create + /sources/{sourceId}#K6Cloud: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceK6Cloud" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_K6Cloud#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceK6CloudPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceK6Cloud" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_K6Cloud#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceK6Cloud" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_K6Cloud#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Kissmetrics: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKissmetricsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceKissmetrics" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kissmetrics#create + /sources/{sourceId}#Kissmetrics: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceKissmetrics" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kissmetrics#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKissmetricsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceKissmetrics" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kissmetrics#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceKissmetrics" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kissmetrics#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Klarna: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKlarnaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + 
$ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceKlarna" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klarna#create + /sources/{sourceId}#Klarna: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceKlarna" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klarna#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKlarnaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceKlarna" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klarna#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceKlarna" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klarna#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Klaviyo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKlaviyoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + 
schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceKlaviyo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klaviyo#create + /sources/{sourceId}#Klaviyo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceKlaviyo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klaviyo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKlaviyoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceKlaviyo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klaviyo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceKlaviyo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Klaviyo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Kyve: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKyveCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceKyve" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kyve#create + /sources/{sourceId}#Kyve: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceKyve" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kyve#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceKyvePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceKyve" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kyve#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceKyve" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Kyve#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Launchdarkly: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLaunchdarklyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLaunchdarkly" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Launchdarkly#create + /sources/{sourceId}#Launchdarkly: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLaunchdarkly" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Launchdarkly#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLaunchdarklyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLaunchdarkly" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Launchdarkly#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLaunchdarkly" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Launchdarkly#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Leadfeeder: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceLeadfeederCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLeadfeeder" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Leadfeeder#create + /sources/{sourceId}#Leadfeeder: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLeadfeeder" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Leadfeeder#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLeadfeederPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLeadfeeder" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Leadfeeder#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLeadfeeder" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Leadfeeder#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Lemlist: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLemlistCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLemlist" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lemlist#create + /sources/{sourceId}#Lemlist: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLemlist" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lemlist#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLemlistPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLemlist" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lemlist#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLemlist" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lemlist#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/sources#LeverHiring: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLeverHiringCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLeverHiring" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LeverHiring#create + /sources/{sourceId}#LeverHiring: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLeverHiring" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LeverHiring#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLeverHiringPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLeverHiring" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LeverHiring#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLeverHiring" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LeverHiring#delete + parameters: + - name: "sourceId" + schema: 
+ format: "UUID" + type: "string" + in: "path" + required: true + /sources#LinkedinAds: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLinkedinAdsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLinkedinAds" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinAds#create + /sources/{sourceId}#LinkedinAds: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLinkedinAds" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinAds#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLinkedinAdsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLinkedinAds" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinAds#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLinkedinAds" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_LinkedinAds#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#LinkedinPages: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLinkedinPagesCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLinkedinPages" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinPages#create + /sources/{sourceId}#LinkedinPages: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLinkedinPages" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinPages#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLinkedinPagesPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLinkedinPages" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinPages#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLinkedinPages" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_LinkedinPages#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Linnworks: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLinnworksCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLinnworks" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Linnworks#create + /sources/{sourceId}#Linnworks: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLinnworks" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Linnworks#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLinnworksPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLinnworks" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Linnworks#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLinnworks" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Linnworks#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Lob: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLobCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLob" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the 
configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lob#create + /sources/{sourceId}#Lob: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLob" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lob#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLobPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLob" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lob#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLob" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lob#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Lokalise: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLokaliseCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLokalise" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration 
for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lokalise#create + /sources/{sourceId}#Lokalise: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLokalise" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lokalise#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLokalisePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLokalise" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lokalise#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLokalise" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Lokalise#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Looker: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLookerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLooker" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Looker#create + /sources/{sourceId}#Looker: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLooker" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Looker#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLookerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLooker" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Looker#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLooker" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Looker#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Luma: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLumaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceLuma" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Luma#create + /sources/{sourceId}#Luma: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceLuma" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Luma#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceLumaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceLuma" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Luma#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceLuma" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Luma#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mailchimp: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMailchimpCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMailchimp" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailchimp#create + /sources/{sourceId}#Mailchimp: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMailchimp" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailchimp#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMailchimpPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMailchimp" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailchimp#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMailchimp" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailchimp#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mailgun: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMailgunCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMailgun" + summary: "Create a source" + description: + "Creates a source given a 
name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailgun#create + /sources/{sourceId}#Mailgun: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMailgun" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailgun#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMailgunPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMailgun" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailgun#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMailgun" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mailgun#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MailjetSms: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMailjetSmsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMailjetSms" + summary: "Create a source" + description: 
+ "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MailjetSms#create + /sources/{sourceId}#MailjetSms: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMailjetSms" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MailjetSms#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMailjetSmsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMailjetSms" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MailjetSms#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMailjetSms" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MailjetSms#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Marketo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMarketoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceMarketo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Marketo#create + /sources/{sourceId}#Marketo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMarketo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Marketo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMarketoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMarketo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Marketo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMarketo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Marketo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Metabase: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMetabaseCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not 
allowed" + operationId: "createSourceMetabase" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Metabase#create + /sources/{sourceId}#Metabase: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMetabase" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Metabase#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMetabasePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMetabase" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Metabase#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMetabase" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Metabase#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MicrosoftOnedrive: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMicrosoftOnedriveCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + 
description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMicrosoftOnedrive" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#create + /sources/{sourceId}#MicrosoftOnedrive: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMicrosoftOnedrive" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMicrosoftOnedrivePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMicrosoftOnedrive" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMicrosoftOnedrive" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftOnedrive#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MicrosoftSharepoint: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMicrosoftSharepointCreateRequest" + tags: + - "Sources" + responses: + 
"200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMicrosoftSharepoint" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#create + /sources/{sourceId}#MicrosoftSharepoint: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMicrosoftSharepoint" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMicrosoftSharepointPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMicrosoftSharepoint" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMicrosoftSharepoint" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftSharepoint#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MicrosoftTeams: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMicrosoftTeamsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMicrosoftTeams" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftTeams#create + /sources/{sourceId}#MicrosoftTeams: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMicrosoftTeams" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftTeams#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMicrosoftTeamsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMicrosoftTeams" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftTeams#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMicrosoftTeams" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MicrosoftTeams#delete + parameters: + - name: "sourceId" + 
schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mixpanel: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMixpanelCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMixpanel" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mixpanel#create + /sources/{sourceId}#Mixpanel: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMixpanel" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mixpanel#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMixpanelPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMixpanel" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mixpanel#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMixpanel" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mixpanel#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Monday: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMondayCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMonday" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#create + /sources/{sourceId}#Monday: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMonday" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMondayPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMonday" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Monday#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMonday" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_Monday#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MongodbV2: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMongodbV2CreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMongodbV2" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#create + /sources/{sourceId}#MongodbV2: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMongodbV2" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMongodbV2PutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMongodbV2" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMongodbV2" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MongodbV2#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mssql: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMssqlCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMssql" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#create + /sources/{sourceId}#Mssql: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMssql" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMssqlPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMssql" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMssql" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mssql#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#MyHours: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMyHoursCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMyHours" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#create + /sources/{sourceId}#MyHours: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMyHours" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMyHoursPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMyHours" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMyHours" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_MyHours#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Mysql: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMysqlCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceMysql" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for 
the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#create + /sources/{sourceId}#Mysql: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceMysql" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceMysqlPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceMysql" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceMysql" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Mysql#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Netsuite: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNetsuiteCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNetsuite" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration 
for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#create + /sources/{sourceId}#Netsuite: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNetsuite" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNetsuitePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNetsuite" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNetsuite" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Netsuite#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#NorthpassLms: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNorthpassLmsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNorthpassLms" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a 
json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#create + /sources/{sourceId}#NorthpassLms: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNorthpassLms" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNorthpassLmsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNorthpassLms" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNorthpassLms" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_NorthpassLms#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Notion: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNotionCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNotion" + summary: "Create a source" + 
description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#create + /sources/{sourceId}#Notion: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNotion" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNotionPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNotion" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNotion" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Notion#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nylas: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNylasCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNylas" + summary: "Create a 
source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#create + /sources/{sourceId}#Nylas: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNylas" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNylasPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNylas" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNylas" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nylas#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Nytimes: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNytimesCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceNytimes" + summary: "Create a 
source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#create + /sources/{sourceId}#Nytimes: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceNytimes" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceNytimesPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceNytimes" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceNytimes" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Nytimes#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Okta: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOktaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOkta" + summary: 
"Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#create + /sources/{sourceId}#Okta: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOkta" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOktaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOkta" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOkta" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Okta#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Omnisend: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOmnisendCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOmnisend" + summary: 
"Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#create + /sources/{sourceId}#Omnisend: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOmnisend" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOmnisendPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOmnisend" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOmnisend" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Omnisend#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Onesignal: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnesignalCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceOnesignal" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#create + /sources/{sourceId}#Onesignal: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOnesignal" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOnesignalPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOnesignal" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOnesignal" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Onesignal#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Oracle: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOracleCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + 
description: "Not allowed" + operationId: "createSourceOracle" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#create + /sources/{sourceId}#Oracle: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOracle" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOraclePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOracle" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOracle" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Oracle#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Orb: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + 
description: "Not allowed" + operationId: "createSourceOrb" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#create + /sources/{sourceId}#Orb: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOrb" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOrb" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOrb" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orb#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Orbit: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbitCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not 
allowed" + operationId: "createSourceOrbit" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#create + /sources/{sourceId}#Orbit: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOrbit" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOrbitPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOrbit" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOrbit" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Orbit#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#OutbrainAmplify: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutbrainAmplifyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + 
description: "Not allowed" + operationId: "createSourceOutbrainAmplify" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#create + /sources/{sourceId}#OutbrainAmplify: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOutbrainAmplify" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutbrainAmplifyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOutbrainAmplify" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOutbrainAmplify" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_OutbrainAmplify#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Outreach: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutreachCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceOutreach" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#create + /sources/{sourceId}#Outreach: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceOutreach" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceOutreachPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceOutreach" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceOutreach" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Outreach#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PaypalTransaction: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaypalTransactionCreateRequest" + tags: + - "Sources" + responses: + "200": 
+ content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePaypalTransaction" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#create + /sources/{sourceId}#PaypalTransaction: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePaypalTransaction" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaypalTransactionPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePaypalTransaction" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePaypalTransaction" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PaypalTransaction#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Paystack: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourcePaystackCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePaystack" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#create + /sources/{sourceId}#Paystack: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePaystack" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePaystackPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePaystack" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePaystack" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Paystack#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pendo: + post: 
+ requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePendoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePendo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#create + /sources/{sourceId}#Pendo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePendo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePendoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePendo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePendo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pendo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pennylane: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePennylaneCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePennylane" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#create + /sources/{sourceId}#Pennylane: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePennylane" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePennylanePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePennylane" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePennylane" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pennylane#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + 
required: true + /sources#Persistiq: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePersistiqCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePersistiq" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#create + /sources/{sourceId}#Persistiq: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePersistiq" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePersistiqPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePersistiq" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePersistiq" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Persistiq#delete + parameters: + - name: "sourceId" + schema: + 
format: "UUID" + type: "string" + in: "path" + required: true + /sources#PexelsApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePexelsApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePexelsApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#create + /sources/{sourceId}#PexelsApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePexelsApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePexelsApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePexelsApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePexelsApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PexelsApi#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Picqer: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePicqerCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePicqer" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#create + /sources/{sourceId}#Picqer: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePicqer" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePicqerPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePicqer" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Picqer#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePicqer" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_Picqer#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pinterest: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePinterestCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePinterest" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#create + /sources/{sourceId}#Pinterest: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePinterest" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePinterestPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePinterest" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePinterest" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pinterest#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pipedrive: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePipedriveCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePipedrive" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#create + /sources/{sourceId}#Pipedrive: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePipedrive" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePipedrivePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePipedrive" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePipedrive" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pipedrive#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Piwik: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePiwikCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePiwik" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the 
configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#create + /sources/{sourceId}#Piwik: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePiwik" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePiwikPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePiwik" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePiwik" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Piwik#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Planhat: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlanhatCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePlanhat" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the 
configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#create + /sources/{sourceId}#Planhat: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePlanhat" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePlanhatPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePlanhat" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePlanhat" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Planhat#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pocket: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePocketCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePocket" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#create + /sources/{sourceId}#Pocket: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePocket" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePocketPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePocket" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePocket" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pocket#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pokeapi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePokeapiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePokeapi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a 
json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#create + /sources/{sourceId}#Pokeapi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePokeapi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePokeapiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePokeapi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePokeapi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pokeapi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#PolygonStockApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePolygonStockApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePolygonStockApi" + summary: "Create a source" + description: + "Creates 
a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#create + /sources/{sourceId}#PolygonStockApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePolygonStockApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePolygonStockApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePolygonStockApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePolygonStockApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_PolygonStockApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Postgres: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostgresCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" 
+ operationId: "createSourcePostgres" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#create + /sources/{sourceId}#Postgres: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePostgres" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostgresPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePostgres" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePostgres" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postgres#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Posthog: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePosthogCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": 
+ description: "Not allowed" + operationId: "createSourcePosthog" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#create + /sources/{sourceId}#Posthog: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePosthog" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePosthogPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePosthog" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePosthog" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Posthog#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Postmarkapp: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostmarkappCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + 
description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePostmarkapp" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#create + /sources/{sourceId}#Postmarkapp: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePostmarkapp" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePostmarkappPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePostmarkapp" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePostmarkapp" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Postmarkapp#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Prestashop: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePrestashopCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePrestashop" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#create + /sources/{sourceId}#Prestashop: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePrestashop" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePrestashopPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePrestashop" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePrestashop" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Prestashop#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Productboard: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceProductboardCreateRequest" + tags: + - "Sources" + responses: 
+ "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceProductboard" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productboard#create + /sources/{sourceId}#Productboard: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceProductboard" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productboard#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceProductboardPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceProductboard" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productboard#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceProductboard" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productboard#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Productive: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceProductiveCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceProductive" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productive#create + /sources/{sourceId}#Productive: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceProductive" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productive#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceProductivePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceProductive" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productive#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceProductive" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Productive#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Pypi: + post: + requestBody: 
+ content: + application/json: + schema: + $ref: "#/components/schemas/SourcePypiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourcePypi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pypi#create + /sources/{sourceId}#Pypi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourcePypi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pypi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourcePypiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourcePypi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pypi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourcePypi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Pypi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Qualaroo: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceQualarooCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceQualaroo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Qualaroo#create + /sources/{sourceId}#Qualaroo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceQualaroo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Qualaroo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceQualarooPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceQualaroo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Qualaroo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceQualaroo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Qualaroo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Railz: + post: 
+ requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRailzCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRailz" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#create + /sources/{sourceId}#Railz: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRailz" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRailzPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRailz" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRailz" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Railz#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Recharge: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRechargeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRecharge" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#create + /sources/{sourceId}#Recharge: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRecharge" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRechargePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRecharge" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRecharge" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recharge#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true 
+ /sources#Recreation: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecreationCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRecreation" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recreation#create + /sources/{sourceId}#Recreation: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRecreation" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recreation#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecreationPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRecreation" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recreation#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRecreation" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recreation#delete + parameters: + - name: "sourceId" + schema: + format: 
"UUID" + type: "string" + in: "path" + required: true + /sources#Recruitee: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecruiteeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRecruitee" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recruitee#create + /sources/{sourceId}#Recruitee: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRecruitee" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recruitee#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecruiteePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRecruitee" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recruitee#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRecruitee" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recruitee#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Recurly: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecurlyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRecurly" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recurly#create + /sources/{sourceId}#Recurly: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRecurly" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recurly#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRecurlyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRecurly" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Recurly#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRecurly" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: 
Source_Recurly#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Reddit: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRedditCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceReddit" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Reddit#create + /sources/{sourceId}#Reddit: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceReddit" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Reddit#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRedditPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceReddit" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Reddit#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceReddit" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Reddit#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Redshift: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRedshiftCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRedshift" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Redshift#create + /sources/{sourceId}#Redshift: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRedshift" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Redshift#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRedshiftPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRedshift" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Redshift#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRedshift" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Redshift#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Retently: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRetentlyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRetently" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the 
configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Retently#create + /sources/{sourceId}#Retently: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRetently" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Retently#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRetentlyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRetently" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Retently#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRetently" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Retently#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#RkiCovid: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRkiCovidCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRkiCovid" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a 
json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RkiCovid#create + /sources/{sourceId}#RkiCovid: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRkiCovid" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RkiCovid#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRkiCovidPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRkiCovid" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RkiCovid#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRkiCovid" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_RkiCovid#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Rollbar: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRollbarCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRollbar" + summary: "Create a source" + description: + "Creates a source given 
a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rollbar#create + /sources/{sourceId}#Rollbar: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRollbar" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rollbar#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRollbarPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRollbar" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rollbar#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRollbar" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rollbar#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Rss: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRssCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceRss" + summary: "Create a source" + description: + "Creates a source 
given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rss#create + /sources/{sourceId}#Rss: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceRss" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rss#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceRssPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceRss" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rss#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceRss" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Rss#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#S3: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceS3CreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceS3" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and 
a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_S3#create + /sources/{sourceId}#S3: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceS3" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_S3#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceS3PutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceS3" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_S3#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceS3" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_S3#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Salesforce: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSalesforceCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSalesforce" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesforce#create + /sources/{sourceId}#Salesforce: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSalesforce" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesforce#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSalesforcePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSalesforce" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesforce#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSalesforce" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesforce#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Salesloft: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSalesloftCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSalesloft" + summary: "Create a source" + description: + "Creates a 
source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesloft#create + /sources/{sourceId}#Salesloft: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSalesloft" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesloft#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSalesloftPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSalesloft" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesloft#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSalesloft" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Salesloft#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SapFieldglass: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSapFieldglassCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSapFieldglass" + 
summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SapFieldglass#create + /sources/{sourceId}#SapFieldglass: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSapFieldglass" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SapFieldglass#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSapFieldglassPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSapFieldglass" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SapFieldglass#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSapFieldglass" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SapFieldglass#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Savvycal: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSavvycalCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + 
"403": + description: "Not allowed" + operationId: "createSourceSavvycal" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Savvycal#create + /sources/{sourceId}#Savvycal: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSavvycal" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Savvycal#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSavvycalPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSavvycal" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Savvycal#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSavvycal" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Savvycal#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Scryfall: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceScryfallCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + 
description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceScryfall" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Scryfall#create + /sources/{sourceId}#Scryfall: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceScryfall" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Scryfall#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceScryfallPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceScryfall" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Scryfall#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceScryfall" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Scryfall#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Secoda: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSecodaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSecoda" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Secoda#create + /sources/{sourceId}#Secoda: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSecoda" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Secoda#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSecodaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSecoda" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Secoda#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSecoda" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Secoda#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Sendgrid: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSendgridCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + 
description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSendgrid" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendgrid#create + /sources/{sourceId}#Sendgrid: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSendgrid" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendgrid#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSendgridPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSendgrid" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendgrid#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSendgrid" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendgrid#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Sendinblue: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSendinblueCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSendinblue" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendinblue#create + /sources/{sourceId}#Sendinblue: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSendinblue" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendinblue#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSendinbluePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSendinblue" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendinblue#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSendinblue" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sendinblue#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Senseforce: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSenseforceCreateRequest" + tags: + - "Sources" + responses: + 
"200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSenseforce" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Senseforce#create + /sources/{sourceId}#Senseforce: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSenseforce" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Senseforce#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSenseforcePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSenseforce" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Senseforce#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSenseforce" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Senseforce#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Sentry: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceSentryCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSentry" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sentry#create + /sources/{sourceId}#Sentry: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSentry" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sentry#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSentryPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSentry" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sentry#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSentry" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sentry#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Sftp: + post: + requestBody: + content: + application/json: + schema: + 
$ref: "#/components/schemas/SourceSftpCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSftp" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sftp#create + /sources/{sourceId}#Sftp: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSftp" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sftp#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSftpPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSftp" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sftp#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSftp" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Sftp#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SftpBulk: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceSftpBulkCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSftpBulk" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SftpBulk#create + /sources/{sourceId}#SftpBulk: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSftpBulk" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SftpBulk#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSftpBulkPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSftpBulk" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SftpBulk#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSftpBulk" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SftpBulk#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Shopify: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceShopifyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceShopify" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shopify#create + /sources/{sourceId}#Shopify: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceShopify" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shopify#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceShopifyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceShopify" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shopify#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceShopify" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shopify#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Shortcut: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceShortcutCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceShortcut" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortcut#create + /sources/{sourceId}#Shortcut: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceShortcut" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortcut#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceShortcutPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceShortcut" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortcut#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceShortcut" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortcut#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true 
+ /sources#Shortio: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceShortioCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceShortio" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortio#create + /sources/{sourceId}#Shortio: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceShortio" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortio#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceShortioPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceShortio" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortio#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceShortio" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Shortio#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: 
"path" + required: true + /sources#Slack: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSlackCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSlack" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Slack#create + /sources/{sourceId}#Slack: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSlack" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Slack#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSlackPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSlack" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Slack#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSlack" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Slack#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: 
"path" + required: true + /sources#Smaily: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSmailyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSmaily" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smaily#create + /sources/{sourceId}#Smaily: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSmaily" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smaily#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSmailyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSmaily" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smaily#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSmaily" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smaily#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" 
+ in: "path" + required: true + /sources#Smartengage: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSmartengageCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSmartengage" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartengage#create + /sources/{sourceId}#Smartengage: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSmartengage" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartengage#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSmartengagePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSmartengage" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartengage#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSmartengage" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartengage#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Smartsheets: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSmartsheetsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSmartsheets" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartsheets#create + /sources/{sourceId}#Smartsheets: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSmartsheets" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartsheets#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSmartsheetsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSmartsheets" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartsheets#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSmartsheets" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Smartsheets#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SnapchatMarketing: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSnapchatMarketingCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSnapchatMarketing" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SnapchatMarketing#create + /sources/{sourceId}#SnapchatMarketing: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSnapchatMarketing" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SnapchatMarketing#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSnapchatMarketingPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSnapchatMarketing" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SnapchatMarketing#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSnapchatMarketing" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SnapchatMarketing#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Snowflake: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSnowflakeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSnowflake" + summary: "Create a source" + description: 
+ "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Snowflake#create + /sources/{sourceId}#Snowflake: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSnowflake" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Snowflake#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSnowflakePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSnowflake" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Snowflake#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSnowflake" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Snowflake#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SonarCloud: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSonarCloudCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceSonarCloud" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SonarCloud#create + /sources/{sourceId}#SonarCloud: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSonarCloud" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SonarCloud#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSonarCloudPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSonarCloud" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SonarCloud#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSonarCloud" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SonarCloud#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SpacexApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSpacexApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" 
+ "403": + description: "Not allowed" + operationId: "createSourceSpacexApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SpacexApi#create + /sources/{sourceId}#SpacexApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSpacexApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SpacexApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSpacexApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSpacexApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SpacexApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSpacexApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SpacexApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SplitIo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSplitIoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" 
+ "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSplitIo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SplitIo#create + /sources/{sourceId}#SplitIo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSplitIo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SplitIo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSplitIoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSplitIo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SplitIo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSplitIo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SplitIo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Square: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSquareCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: 
"Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSquare" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Square#create + /sources/{sourceId}#Square: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSquare" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Square#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSquarePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSquare" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Square#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSquare" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Square#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Strava: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceStravaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + 
description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceStrava" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Strava#create + /sources/{sourceId}#Strava: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceStrava" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Strava#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceStravaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceStrava" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Strava#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceStrava" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Strava#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Stripe: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceStripeCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceStripe" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Stripe#create + /sources/{sourceId}#Stripe: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceStripe" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Stripe#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceStripePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceStripe" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Stripe#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceStripe" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Stripe#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#SurveySparrow: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSurveySparrowCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSurveySparrow" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SurveySparrow#create + /sources/{sourceId}#SurveySparrow: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSurveySparrow" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SurveySparrow#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSurveySparrowPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSurveySparrow" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SurveySparrow#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSurveySparrow" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_SurveySparrow#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Surveymonkey: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceSurveymonkeyCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSurveymonkey" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Surveymonkey#create + /sources/{sourceId}#Surveymonkey: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSurveymonkey" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Surveymonkey#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSurveymonkeyPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSurveymonkey" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Surveymonkey#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSurveymonkey" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Surveymonkey#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/sources#Survicate: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSurvicateCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceSurvicate" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Survicate#create + /sources/{sourceId}#Survicate: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceSurvicate" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Survicate#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceSurvicatePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceSurvicate" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Survicate#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceSurvicate" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Survicate#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + 
type: "string" + in: "path" + required: true + /sources#Teamwork: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTeamworkCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTeamwork" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Teamwork#create + /sources/{sourceId}#Teamwork: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTeamwork" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Teamwork#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTeamworkPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTeamwork" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Teamwork#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTeamwork" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Teamwork#delete + parameters: + - name: 
"sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Tempo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTempoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTempo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Tempo#create + /sources/{sourceId}#Tempo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTempo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Tempo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTempoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTempo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Tempo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTempo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Tempo#delete + parameters: + - name: 
"sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#TheGuardianApi: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTheGuardianApiCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTheGuardianApi" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TheGuardianApi#create + /sources/{sourceId}#TheGuardianApi: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTheGuardianApi" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TheGuardianApi#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTheGuardianApiPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTheGuardianApi" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TheGuardianApi#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTheGuardianApi" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TheGuardianApi#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#TiktokMarketing: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTiktokMarketingCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTiktokMarketing" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TiktokMarketing#create + /sources/{sourceId}#TiktokMarketing: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTiktokMarketing" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TiktokMarketing#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTiktokMarketingPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTiktokMarketing" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TiktokMarketing#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTiktokMarketing" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TiktokMarketing#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Trello: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTrelloCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTrello" + summary: "Create a source" + description: + "Creates a source given a 
name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trello#create + /sources/{sourceId}#Trello: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTrello" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trello#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTrelloPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTrello" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trello#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTrello" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trello#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Trustpilot: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTrustpilotCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTrustpilot" + summary: "Create a source" + description: + 
"Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trustpilot#create + /sources/{sourceId}#Trustpilot: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTrustpilot" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trustpilot#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTrustpilotPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTrustpilot" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trustpilot#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTrustpilot" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Trustpilot#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#TvmazeSchedule: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTvmazeScheduleCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createSourceTvmazeSchedule" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TvmazeSchedule#create + /sources/{sourceId}#TvmazeSchedule: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTvmazeSchedule" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TvmazeSchedule#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTvmazeSchedulePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTvmazeSchedule" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TvmazeSchedule#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTvmazeSchedule" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TvmazeSchedule#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Twilio: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTwilioCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + 
"400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTwilio" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twilio#create + /sources/{sourceId}#Twilio: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTwilio" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twilio#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTwilioPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTwilio" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twilio#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTwilio" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twilio#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#TwilioTaskrouter: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTwilioTaskrouterCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + 
description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTwilioTaskrouter" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TwilioTaskrouter#create + /sources/{sourceId}#TwilioTaskrouter: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTwilioTaskrouter" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TwilioTaskrouter#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTwilioTaskrouterPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTwilioTaskrouter" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TwilioTaskrouter#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTwilioTaskrouter" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_TwilioTaskrouter#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Twitter: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTwitterCreateRequest" + tags: + - "Sources" + 
responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTwitter" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twitter#create + /sources/{sourceId}#Twitter: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTwitter" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twitter#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTwitterPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTwitter" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twitter#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTwitter" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Twitter#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Typeform: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTypeformCreateRequest" + 
tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceTypeform" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Typeform#create + /sources/{sourceId}#Typeform: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceTypeform" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Typeform#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceTypeformPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceTypeform" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Typeform#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceTypeform" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Typeform#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#UsCensus: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceUsCensusCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceUsCensus" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_UsCensus#create + /sources/{sourceId}#UsCensus: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceUsCensus" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_UsCensus#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceUsCensusPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceUsCensus" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_UsCensus#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceUsCensus" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_UsCensus#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Vantage: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceVantageCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceVantage" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vantage#create + /sources/{sourceId}#Vantage: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceVantage" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vantage#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceVantagePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceVantage" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vantage#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceVantage" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vantage#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Vwo: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceVwoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceVwo" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vwo#create + /sources/{sourceId}#Vwo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceVwo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vwo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceVwoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceVwo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vwo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceVwo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Vwo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Webflow: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/SourceWebflowCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceWebflow" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Webflow#create + /sources/{sourceId}#Webflow: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceWebflow" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Webflow#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWebflowPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceWebflow" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Webflow#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceWebflow" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Webflow#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#WhenIWork: + post: + 
requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWhenIWorkCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceWhenIWork" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhenIWork#create + /sources/{sourceId}#WhenIWork: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceWhenIWork" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhenIWork#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWhenIWorkPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceWhenIWork" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhenIWork#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceWhenIWork" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhenIWork#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + 
required: true + /sources#WhiskyHunter: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWhiskyHunterCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceWhiskyHunter" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhiskyHunter#create + /sources/{sourceId}#WhiskyHunter: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceWhiskyHunter" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhiskyHunter#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWhiskyHunterPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceWhiskyHunter" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhiskyHunter#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceWhiskyHunter" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WhiskyHunter#delete + parameters: + 
- name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#WikipediaPageviews: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWikipediaPageviewsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceWikipediaPageviews" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WikipediaPageviews#create + /sources/{sourceId}#WikipediaPageviews: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceWikipediaPageviews" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WikipediaPageviews#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWikipediaPageviewsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceWikipediaPageviews" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WikipediaPageviews#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceWikipediaPageviews" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_WikipediaPageviews#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Woocommerce: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWoocommerceCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceWoocommerce" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Woocommerce#create + /sources/{sourceId}#Woocommerce: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceWoocommerce" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Woocommerce#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceWoocommercePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceWoocommerce" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Woocommerce#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceWoocommerce" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Woocommerce#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Xkcd: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceXkcdCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceXkcd" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob 
containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Xkcd#create + /sources/{sourceId}#Xkcd: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceXkcd" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Xkcd#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceXkcdPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceXkcd" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Xkcd#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceXkcd" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Xkcd#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#YandexMetrica: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceYandexMetricaCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceYandexMetrica" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a 
json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YandexMetrica#create + /sources/{sourceId}#YandexMetrica: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceYandexMetrica" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YandexMetrica#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceYandexMetricaPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceYandexMetrica" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YandexMetrica#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceYandexMetrica" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YandexMetrica#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Yotpo: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceYotpoCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceYotpo" + summary: "Create a source" + 
description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Yotpo#create + /sources/{sourceId}#Yotpo: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceYotpo" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Yotpo#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceYotpoPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceYotpo" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Yotpo#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceYotpo" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Yotpo#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#YoutubeAnalytics: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceYoutubeAnalyticsCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceYoutubeAnalytics" + 
summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YoutubeAnalytics#create + /sources/{sourceId}#YoutubeAnalytics: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceYoutubeAnalytics" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YoutubeAnalytics#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceYoutubeAnalyticsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceYoutubeAnalytics" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YoutubeAnalytics#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceYoutubeAnalytics" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_YoutubeAnalytics#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ZendeskChat: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskChatCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": 
+ description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZendeskChat" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskChat#create + /sources/{sourceId}#ZendeskChat: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZendeskChat" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskChat#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskChatPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZendeskChat" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskChat#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZendeskChat" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskChat#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ZendeskSunshine: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskSunshineCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZendeskSunshine" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSunshine#create + /sources/{sourceId}#ZendeskSunshine: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZendeskSunshine" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSunshine#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskSunshinePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZendeskSunshine" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSunshine#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZendeskSunshine" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSunshine#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ZendeskSupport: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/SourceZendeskSupportCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZendeskSupport" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSupport#create + /sources/{sourceId}#ZendeskSupport: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZendeskSupport" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSupport#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskSupportPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZendeskSupport" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSupport#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZendeskSupport" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskSupport#delete + parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: 
true + /sources#ZendeskTalk: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskTalkCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZendeskTalk" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskTalk#create + /sources/{sourceId}#ZendeskTalk: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZendeskTalk" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskTalk#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZendeskTalkPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZendeskTalk" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskTalk#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZendeskTalk" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZendeskTalk#delete + parameters: + - name: "sourceId" + 
schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Zenloop: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZenloopCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZenloop" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zenloop#create + /sources/{sourceId}#Zenloop: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZenloop" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zenloop#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZenloopPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZenloop" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zenloop#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZenloop" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zenloop#delete + parameters: + - 
name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#ZohoCrm: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZohoCrmCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZohoCrm" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoCrm#create + /sources/{sourceId}#ZohoCrm: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZohoCrm" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoCrm#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZohoCrmPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZohoCrm" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoCrm#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZohoCrm" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_ZohoCrm#delete 
+ parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Zoom: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZoomCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceZoom" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zoom#create + /sources/{sourceId}#Zoom: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceZoom" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zoom#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceZoomPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceZoom" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zoom#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceZoom" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Zoom#delete + parameters: + - 
name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /sources#Custom: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCustomCreateRequest" + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createSourceCustom" + summary: "Create a source" + description: + "Creates a source given a name, workspace id, and a json blob containing\ + \ the configuration for the source." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Custom#create + /sources/{sourceId}#Custom: + get: + tags: + - "Sources" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/SourceResponse" + description: "Get a Source by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getSourceCustom" + summary: "Get Source details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Custom#read + put: + tags: + - "Sources" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/SourceCustomPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putSourceCustom" + summary: "Update a Source fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Custom#update + delete: + tags: + - "Sources" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteSourceCustom" + summary: "Delete a Source" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Source_Custom#delete + 
parameters: + - name: "sourceId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Astra: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationAstraCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationAstra" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Astra#create + /destinations/{destinationId}#Astra: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationAstra" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Astra#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationAstraPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationAstra" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Astra#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationAstra" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Astra#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#AwsDatalake: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationAwsDatalakeCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationAwsDatalake" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AwsDatalake#create + /destinations/{destinationId}#AwsDatalake: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationAwsDatalake" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AwsDatalake#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationAwsDatalakePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationAwsDatalake" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AwsDatalake#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationAwsDatalake" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AwsDatalake#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#AzureBlobStorage: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationAzureBlobStorageCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + 
operationId: "createDestinationAzureBlobStorage" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AzureBlobStorage#create + /destinations/{destinationId}#AzureBlobStorage: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationAzureBlobStorage" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AzureBlobStorage#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationAzureBlobStoragePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationAzureBlobStorage" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AzureBlobStorage#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationAzureBlobStorage" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_AzureBlobStorage#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Bigquery: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationBigqueryCreateRequest" + tags: + - 
"Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationBigquery" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Bigquery#create + /destinations/{destinationId}#Bigquery: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationBigquery" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Bigquery#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationBigqueryPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationBigquery" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Bigquery#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationBigquery" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Bigquery#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true 
+ /destinations#Clickhouse: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationClickhouseCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationClickhouse" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Clickhouse#create + /destinations/{destinationId}#Clickhouse: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationClickhouse" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Clickhouse#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationClickhousePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationClickhouse" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Clickhouse#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationClickhouse" + summary: "Delete a Destination" + 
x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Clickhouse#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Convex: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationConvexCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationConvex" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Convex#create + /destinations/{destinationId}#Convex: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationConvex" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Convex#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationConvexPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationConvex" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Convex#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationConvex" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Convex#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Databricks: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDatabricksCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationDatabricks" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Databricks#create + /destinations/{destinationId}#Databricks: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationDatabricks" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Databricks#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDatabricksPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationDatabricks" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Databricks#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationDatabricks" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Databricks#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#DevNull: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDevNullCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createDestinationDevNull" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_DevNull#create + /destinations/{destinationId}#DevNull: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationDevNull" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_DevNull#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDevNullPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationDevNull" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_DevNull#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationDevNull" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_DevNull#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Duckdb: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDuckdbCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationDuckdb" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Duckdb#create + /destinations/{destinationId}#Duckdb: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationDuckdb" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Duckdb#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDuckdbPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationDuckdb" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Duckdb#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationDuckdb" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Duckdb#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Dynamodb: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationDynamodbCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationDynamodb" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Dynamodb#create + /destinations/{destinationId}#Dynamodb: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationDynamodb" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Dynamodb#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationDynamodbPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationDynamodb" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Dynamodb#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationDynamodb" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Dynamodb#delete + parameters: + - name: "destinationId" + 
schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Elasticsearch: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationElasticsearchCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationElasticsearch" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Elasticsearch#create + /destinations/{destinationId}#Elasticsearch: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationElasticsearch" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Elasticsearch#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationElasticsearchPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationElasticsearch" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Elasticsearch#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationElasticsearch" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Elasticsearch#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Firebolt: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationFireboltCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationFirebolt" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firebolt#create + /destinations/{destinationId}#Firebolt: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationFirebolt" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firebolt#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationFireboltPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationFirebolt" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firebolt#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationFirebolt" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firebolt#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Firestore: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationFirestoreCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationFirestore" + 
summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firestore#create + /destinations/{destinationId}#Firestore: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationFirestore" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firestore#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationFirestorePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationFirestore" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firestore#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationFirestore" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Firestore#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Gcs: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationGcsCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationGcs" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Gcs#create + /destinations/{destinationId}#Gcs: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationGcs" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Gcs#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationGcsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationGcs" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Gcs#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationGcs" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Gcs#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#GoogleSheets: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationGoogleSheetsCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationGoogleSheets" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_GoogleSheets#create + /destinations/{destinationId}#GoogleSheets: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationGoogleSheets" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_GoogleSheets#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationGoogleSheetsPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationGoogleSheets" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_GoogleSheets#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationGoogleSheets" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_GoogleSheets#delete + 
parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Iceberg: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationIcebergCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationIceberg" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Iceberg#create + /destinations/{destinationId}#Iceberg: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationIceberg" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Iceberg#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationIcebergPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationIceberg" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Iceberg#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationIceberg" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Iceberg#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Milvus: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMilvusCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationMilvus" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#create + /destinations/{destinationId}#Milvus: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationMilvus" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMilvusPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationMilvus" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationMilvus" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Mongodb: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMongodbCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationMongodb" + summary: "Create a 
destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mongodb#create + /destinations/{destinationId}#Mongodb: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationMongodb" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mongodb#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMongodbPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationMongodb" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mongodb#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationMongodb" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mongodb#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Mssql: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMssqlCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful 
operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationMssql" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mssql#create + /destinations/{destinationId}#Mssql: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationMssql" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mssql#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMssqlPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationMssql" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mssql#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationMssql" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mssql#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Mysql: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMysqlCreateRequest" + tags: + - "Destinations" + 
responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationMysql" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mysql#create + /destinations/{destinationId}#Mysql: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationMysql" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mysql#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMysqlPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationMysql" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mysql#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationMysql" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Mysql#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Oracle: + post: + requestBody: 
+ content: + application/json: + schema: + $ref: "#/components/schemas/DestinationOracleCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationOracle" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Oracle#create + /destinations/{destinationId}#Oracle: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationOracle" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Oracle#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationOraclePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationOracle" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Oracle#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationOracle" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Oracle#delete + parameters: + - 
name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Pgvector: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPgvectorCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationPgvector" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pgvector#create + /destinations/{destinationId}#Pgvector: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationPgvector" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pgvector#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPgvectorPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationPgvector" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pgvector#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationPgvector" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pgvector#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Pinecone: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPineconeCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationPinecone" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#create + /destinations/{destinationId}#Pinecone: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationPinecone" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPineconePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationPinecone" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationPinecone" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Postgres: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPostgresCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationPostgres" + 
summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Postgres#create + /destinations/{destinationId}#Postgres: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationPostgres" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Postgres#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPostgresPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationPostgres" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Postgres#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationPostgres" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Postgres#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Pubsub: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPubsubCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationPubsub" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pubsub#create + /destinations/{destinationId}#Pubsub: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationPubsub" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pubsub#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPubsubPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationPubsub" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pubsub#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationPubsub" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pubsub#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Qdrant: + post: + requestBody: + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationQdrantCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationQdrant" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Qdrant#create + /destinations/{destinationId}#Qdrant: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationQdrant" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Qdrant#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationQdrantPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationQdrant" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Qdrant#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationQdrant" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Qdrant#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + 
type: "string" + in: "path" + required: true + /destinations#Redis: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationRedisCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationRedis" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redis#create + /destinations/{destinationId}#Redis: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationRedis" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redis#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationRedisPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationRedis" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redis#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationRedis" + summary: "Delete a Destination" + x-use-speakeasy-middleware: 
true + x-speakeasy-entity-operation: Destination_Redis#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Redshift: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationRedshiftCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationRedshift" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redshift#create + /destinations/{destinationId}#Redshift: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationRedshift" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redshift#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationRedshiftPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationRedshift" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redshift#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationRedshift" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Redshift#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#S3: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationS3CreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationS3" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3#create + /destinations/{destinationId}#S3: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationS3" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationS3PutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationS3" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationS3" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#S3Glue: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationS3GlueCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationS3Glue" + summary: "Create a destination" + description: + "Creates a 
destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3Glue#create + /destinations/{destinationId}#S3Glue: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationS3Glue" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3Glue#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationS3GluePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationS3Glue" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3Glue#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationS3Glue" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_S3Glue#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#SftpJson: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationSftpJsonCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid 
data" + "403": + description: "Not allowed" + operationId: "createDestinationSftpJson" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SftpJson#create + /destinations/{destinationId}#SftpJson: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationSftpJson" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SftpJson#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationSftpJsonPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationSftpJson" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SftpJson#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationSftpJson" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SftpJson#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Snowflake: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationSnowflakeCreateRequest" + tags: + - "Destinations" + responses: + 
"200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationSnowflake" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Snowflake#create + /destinations/{destinationId}#Snowflake: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationSnowflake" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Snowflake#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationSnowflakePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationSnowflake" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Snowflake#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationSnowflake" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Snowflake#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + 
/destinations#SnowflakeCortex: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationSnowflakeCortexCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationSnowflakeCortex" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SnowflakeCortex#create + /destinations/{destinationId}#SnowflakeCortex: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationSnowflakeCortex" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SnowflakeCortex#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationSnowflakeCortexPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationSnowflakeCortex" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SnowflakeCortex#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationSnowflakeCortex" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_SnowflakeCortex#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Teradata: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationTeradataCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationTeradata" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Teradata#create + /destinations/{destinationId}#Teradata: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationTeradata" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Teradata#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationTeradataPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationTeradata" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Teradata#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationTeradata" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Teradata#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Timeplus: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationTimeplusCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationTimeplus" + 
summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Timeplus#create + /destinations/{destinationId}#Timeplus: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationTimeplus" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Timeplus#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationTimeplusPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationTimeplus" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Timeplus#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationTimeplus" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Timeplus#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Typesense: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationTypesenseCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: 
"#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationTypesense" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Typesense#create + /destinations/{destinationId}#Typesense: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationTypesense" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Typesense#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationTypesensePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationTypesense" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Typesense#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationTypesense" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Typesense#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Vectara: + post: + requestBody: + content: + 
application/json: + schema: + $ref: "#/components/schemas/DestinationVectaraCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationVectara" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Vectara#create + /destinations/{destinationId}#Vectara: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationVectara" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Vectara#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationVectaraPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationVectara" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Vectara#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationVectara" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Vectara#delete + parameters: + - 
name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Weaviate: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationWeaviateCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationWeaviate" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Weaviate#create + /destinations/{destinationId}#Weaviate: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationWeaviate" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Weaviate#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationWeaviatePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationWeaviate" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Weaviate#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationWeaviate" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Weaviate#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Yellowbrick: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationYellowbrickCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationYellowbrick" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." 
+ x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Yellowbrick#create + /destinations/{destinationId}#Yellowbrick: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationYellowbrick" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Yellowbrick#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationYellowbrickPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationYellowbrick" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Yellowbrick#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationYellowbrick" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Yellowbrick#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true + /destinations#Custom: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationCustomCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: 
"createDestinationCustom" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Custom#create + /destinations/{destinationId}#Custom: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationCustom" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Custom#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationCustomPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationCustom" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Custom#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationCustom" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Custom#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true +components: + responses: + InitiateOauthResponse: + content: + application/json: {} + description: + "Response from the initiate OAuth call should be an object with\ + \ a single property which will be the `redirect_url`. 
If a user is redirected\ + \ to this URL, they'll be prompted by the identity provider to authenticate." + x-speakeasy-component: true + schemas: + WorkspaceId: + type: "string" + format: "uuid" + x-speakeasy-component: true + OrganizationId: + type: "string" + format: "uuid" + x-speakeasy-component: true + PermissionType: + type: "string" + description: "Describes what actions/endpoints the permission entitles to" + enum: + - "instance_admin" + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_owner" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + PublicPermissionType: + type: "string" + description: + "Subset of `PermissionType` (removing `instance_admin`), could\ + \ be used in public-api." + enum: + - "organization_admin" + - "organization_editor" + - "organization_reader" + - "organization_member" + - "workspace_admin" + - "workspace_editor" + - "workspace_reader" + x-speakeasy-component: true + UserId: + type: "string" + description: "Internal Airbyte user ID" + format: "uuid" + x-speakeasy-component: true + AuthProvider: + type: "string" + description: "Auth Provider" + default: "airbyte" + enum: + - "airbyte" + - "google_identity_platform" + - "keycloak" + x-speakeasy-component: true + UserStatus: + type: "string" + description: "user status" + enum: + - "invited" + - "registered" + - "disabled" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SelectedFieldInfo: + type: "object" + description: + "Path to a field/column/property in a stream to be selected. For\ + \ example, if the field to be selected is a database column called \"foo\"\ + , this will be [\"foo\"]. Use multiple path elements for nested schemas." + properties: + fieldPath: + type: "array" + items: + type: "string" + x-speakeasy-component: true + SelectedFields: + description: "Paths to the fields that will be included in the configured catalog." 
+ type: "array" + items: + $ref: "#/components/schemas/SelectedFieldInfo" + x-speakeasy-component: true + OAuthConfiguration: + description: + "The values required to configure OAuth flows. The schema for this\ + \ must match the `OAuthConfigSpecification.oauthUserInputFromConnectorConfigSpecification`\ + \ schema." + x-speakeasy-component: true + OAuthInputConfiguration: + $ref: "#/components/schemas/OAuthConfiguration" + x-speakeasy-component: true + ApplicationCreate: + required: + - "name" + type: "object" + properties: + name: + type: "string" + x-speakeasy-component: true + ApplicationReadList: + required: + - "applications" + type: "object" + properties: + applications: + type: "array" + items: + $ref: "#/components/schemas/ApplicationRead" + x-speakeasy-component: true + ApplicationRead: + required: + - "id" + - "name" + - "clientId" + - "clientSecret" + - "createdAt" + type: "object" + properties: + id: + type: "string" + name: + type: "string" + clientId: + type: "string" + clientSecret: + type: "string" + createdAt: + type: "integer" + format: "int64" + x-speakeasy-component: true + ApplicationTokenRequestWithGrant: + required: + - "client_id" + - "client_secret" + - "grant_type" + type: "object" + properties: + client_id: + type: "string" + client_secret: + type: "string" + grant-type: + enum: + - "client_credentials" + x-speakeasy-component: true + PublicAccessTokenResponse: + required: + - "access_token" + - "token_type" + - "expires_in" + type: "object" + properties: + access_token: + type: "string" + token_type: + enum: + - "Bearer" + expires_in: + type: "integer" + format: "int64" + x-speakeasy-component: true + RedirectUrlResponse: + title: "Root Type for RedirectUrlResponse" + description: "" + type: "object" + properties: + redirectUrl: + format: "url" + type: "string" + example: + redirectUrl: "https://example.com" + x-speakeasy-component: true + JobResponse: + title: "Root Type for JobResponse" + description: "Provides details of a single 
job." + required: + - "jobId" + - "status" + - "jobType" + - "startTime" + - "connectionId" + type: "object" + properties: + jobId: + format: "int64" + type: "integer" + status: + $ref: "#/components/schemas/JobStatusEnum" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + startTime: + type: "string" + connectionId: + format: "UUID" + type: "string" + lastUpdatedAt: + type: "string" + duration: + description: "Duration of a sync in ISO_8601 format" + type: "string" + bytesSynced: + format: "int64" + type: "integer" + rowsSynced: + format: "int64" + type: "integer" + example: + id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + duration: "PT8H6M12S" + x-speakeasy-component: true + JobsResponse: + title: "Root Type for JobsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/JobResponse" + example: + next: "https://api.airbyte.com/v1/jobs?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/jobs?limit=5&offset=0" + data: + - id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + status: "running" + jobType: "sync" + startTime: "2023-03-25T01:30:50Z" + x-speakeasy-component: true + ConnectionCreateRequest: + required: + - "sourceId" + - "destinationId" + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + sourceId: + format: "uuid" + type: "string" + destinationId: + format: "uuid" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. 
If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: Connection + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + ConnectionPatchRequest: + type: "object" + properties: + name: + description: "Optional name of the connection" + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + schedule: + $ref: "#/components/schemas/AirbyteApiConnectionSchedule" + dataResidency: + $ref: "#/components/schemas/GeographyEnumNoDefault" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnumNoDefault" + namespaceFormat: + type: "string" + description: + "Used when namespaceDefinition is 'custom_format'. If blank\ + \ then behaves like namespaceDefinition = 'destination'. If \"${SOURCE_NAMESPACE}\"\ + \ then behaves like namespaceDefinition = 'source'." + default: null + example: "${SOURCE_NAMESPACE}" + prefix: + type: "string" + description: + "Prefix that will be prepended to the name of each stream when\ + \ it is written to the destination (ex. “airbyte_” causes “projects” =>\ + \ “airbyte_projects”)." 
+ nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnumNoDefault" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + x-speakeasy-entity: Connection + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + JobCreateRequest: + title: "Root Type for JobCreate" + description: + "Creates a new Job from the configuration provided in the request\ + \ body." + required: + - "jobType" + - "connectionId" + type: "object" + properties: + connectionId: + format: "UUID" + type: "string" + jobType: + $ref: "#/components/schemas/JobTypeEnum" + example: + connectionId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + jobType: "sync" + x-speakeasy-component: true + JobStatusEnum: + enum: + - "pending" + - "running" + - "incomplete" + - "failed" + - "succeeded" + - "cancelled" + type: "string" + x-speakeasy-component: true + JobTypeEnum: + description: + "Enum that describes the different types of jobs that the platform\ + \ runs." + enum: + - "sync" + - "reset" + - "refresh" + - "clear" + type: "string" + x-speakeasy-component: true + SourceCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the source e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.sourceType\ + \ or definitionId must be provided." + format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." 
+ type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + SourcePatchRequest: + type: "object" + properties: + name: + type: "string" + example: "My source" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + secretId: + description: "Optional secretID obtained through the OAuth redirect flow." + type: "string" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + InitiateOauthRequest: + title: "Root Type for initiate-oauth-post-body" + required: + - "redirectUrl" + - "workspaceId" + type: "object" + properties: + name: + description: + "The name of the source to authenticate to. Deprecated - use\ + \ sourceType instead." + type: "string" + sourceType: + description: "The name of the source to authenticate to" + type: "string" + redirectUrl: + description: + "The URL to redirect the user to with the OAuth secret stored\ + \ in the secret_id query string parameter after authentication is complete." + type: "string" + workspaceId: + format: "uuid" + description: + "The workspace to create the secret and eventually the full\ + \ source." + type: "string" + oAuthInputConfiguration: + $ref: "#/components/schemas/OAuthInputConfiguration" + description: "Input configuration for OAuth required by some sources." 
+ example: + redirectUrl: "https://cloud.airbyte.io/v1/api/oauth/callback" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + destinationId: "3d93b16c-ff5f-421c-8908-5a3c82088f14" + x-speakeasy-component: true + WorkspaceOAuthCredentialsRequest: + title: "Root Type for WorkspaceOAuthCredentials" + description: "POST body for creating/updating workspace level OAuth credentials" + required: + - "actorType" + - "name" + - "configuration" + type: "object" + properties: + actorType: + $ref: "#/components/schemas/ActorTypeEnum" + name: + type: "string" + description: "The name of the source i.e. google-ads" + configuration: + $ref: "#/components/schemas/OAuthCredentialsConfiguration" + x-speakeasy-component: true + OAuthCredentialsConfiguration: + description: + "The configuration for this source/destination based on the OAuth\ + \ section of the relevant specification." + type: "object" + example: + credentials: + client_id: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + client_secret: "shhhhhh" + x-speakeasy-component: true + ConnectionResponse: + title: "Root Type for ConnectionResponse" + description: "Provides details of a single connection." 
+ type: "object" + required: + - "connectionId" + - "name" + - "sourceId" + - "destinationId" + - "workspaceId" + - "status" + - "schedule" + - "dataResidency" + - "configurations" + properties: + connectionId: + format: "UUID" + type: "string" + name: + type: "string" + sourceId: + format: "UUID" + type: "string" + destinationId: + format: "UUID" + type: "string" + workspaceId: + format: "UUID" + type: "string" + status: + $ref: "#/components/schemas/ConnectionStatusEnum" + schedule: + $ref: "#/components/schemas/ConnectionScheduleResponse" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + nonBreakingSchemaUpdatesBehavior: + $ref: "#/components/schemas/NonBreakingSchemaUpdatesBehaviorEnum" + namespaceDefinition: + $ref: "#/components/schemas/NamespaceDefinitionEnum" + namespaceFormat: + type: "string" + prefix: + type: "string" + configurations: + $ref: "#/components/schemas/StreamConfigurations" + x-speakeasy-entity: Connection + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + AirbyteApiConnectionSchedule: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeEnum" + cronExpression: + type: "string" + x-speakeasy-component: true + ScheduleTypeEnum: + type: "string" + enum: + - "manual" + - "cron" + x-speakeasy-component: true + ConnectionScheduleResponse: + description: + "schedule for when the the connection should run, per the schedule\ + \ type" + type: "object" + required: + - "scheduleType" + properties: + scheduleType: + $ref: "#/components/schemas/ScheduleTypeWithBasicEnum" + cronExpression: + type: "string" + basicTiming: + type: "string" + x-speakeasy-component: true + ScheduleTypeWithBasicEnum: + type: "string" + enum: + - "manual" + - "cron" + - "basic" + x-speakeasy-component: true + GeographyEnum: + type: "string" + enum: + - "auto" + - "us" + - 
"eu" + default: "auto" + x-speakeasy-component: true + GeographyEnumNoDefault: + type: "string" + enum: + - "auto" + - "us" + - "eu" + x-speakeasy-component: true + ConnectionStatusEnum: + type: "string" + enum: + - "active" + - "inactive" + - "deprecated" + x-speakeasy-component: true + NamespaceDefinitionEnum: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + default: "destination" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnum: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + default: "ignore" + x-speakeasy-component: true + NamespaceDefinitionEnumNoDefault: + type: "string" + description: "Define the location where the data will be stored in the destination" + enum: + - "source" + - "destination" + - "custom_format" + x-speakeasy-component: true + NonBreakingSchemaUpdatesBehaviorEnumNoDefault: + type: "string" + description: + "Set how Airbyte handles syncs when it detects a non-breaking schema\ + \ change in the source" + enum: + - "ignore" + - "disable_connection" + - "propagate_columns" + - "propagate_fully" + x-speakeasy-component: true + DestinationResponse: + title: "Root Type for DestinationResponse" + description: "Provides details of a single destination." 
+ type: "object" + required: + - "destinationId" + - "name" + - "destinationType" + - "workspaceId" + - "configuration" + properties: + destinationId: + format: "UUID" + type: "string" + name: + type: "string" + destinationType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + example: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + SourceResponse: + title: "Root Type for SourceResponse" + description: "Provides details of a single source." + type: "object" + required: + - "sourceId" + - "name" + - "sourceType" + - "workspaceId" + - "configuration" + properties: + sourceId: + format: "UUID" + type: "string" + name: + type: "string" + sourceType: + type: "string" + workspaceId: + format: "UUID" + type: "string" + configuration: + $ref: "#/components/schemas/SourceConfiguration" + example: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + DestinationCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + description: "Name of the destination e.g. dev-mysql-instance." + type: "string" + definitionId: + description: + "The UUID of the connector definition. One of configuration.destinationType\ + \ or definitionId must be provided." 
+ format: "uuid" + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPatchRequest: + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + DestinationPutRequest: + required: + - "name" + - "configuration" + type: "object" + properties: + name: + type: "string" + configuration: + $ref: "#/components/schemas/DestinationConfiguration" + x-implements: "io.airbyte.api.common.ConfigurableActor" + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceCreateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + organizationId: + description: "ID of organization to add workspace to." + format: "uuid" + type: "string" + x-speakeasy-entity: Workspace + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceUpdateRequest: + required: + - "name" + type: "object" + properties: + name: + description: "Name of the workspace" + type: "string" + x-speakeasy-entity: Workspace + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + WorkspaceResponse: + title: "Root Type for WorkspaceResponse" + description: "Provides details of a single workspace." 
+ type: "object" + required: + - "workspaceId" + - "name" + - "dataResidency" + properties: + workspaceId: + format: "UUID" + type: "string" + name: + type: "string" + dataResidency: + $ref: "#/components/schemas/GeographyEnum" + x-speakeasy-entity: Workspace + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UserResponse: + title: "Root Type for UserResponse" + description: "Provides details of a single user in an organization." + type: "object" + required: + - "id" + - "name" + - "email" + properties: + name: + description: "Name of the user" + type: "string" + id: + $ref: "#/components/schemas/UserId" + email: + type: "string" + format: "email" + x-speakeasy-entity: User + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + UsersResponse: + title: "Root Type for UsersResponse" + description: "List/Array of multiple users in an organization" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/UserResponse" + x-speakeasy-component: true + x-speakeasy-entity: User + x-speakeasy-param-suppress-computed-diff: true + PermissionCreateRequest: + required: + - "permissionType" + - "userId" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PublicPermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionUpdateRequest: + required: + - "permissionType" + type: "object" + properties: + permissionType: + $ref: "#/components/schemas/PermissionType" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionResponse: + title: "Root Type for PermissionResponse" + description: "Provides details of a single permission." 
+ type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + workspaceId: + $ref: "#/components/schemas/WorkspaceId" + organizationId: + $ref: "#/components/schemas/OrganizationId" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionScope: + description: "Scope of a single permission, e.g. workspace, organization" + type: "string" + enum: + - "workspace" + - "organization" + - "none" + x-speakeasy-component: true + PermissionResponseRead: + title: "Root type for PermissionResponseRead" + description: "Reformat PermissionResponse with permission scope" + type: "object" + required: + - "permissionId" + - "permissionType" + - "userId" + - "scope" + - "scopeId" + properties: + permissionId: + type: "string" + format: "uuid" + permissionType: + $ref: "#/components/schemas/PermissionType" + userId: + $ref: "#/components/schemas/UserId" + scopeId: + type: "string" + format: "uuid" + scope: + $ref: "#/components/schemas/PermissionScope" + x-speakeasy-entity: Permission + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + PermissionsResponse: + title: "Root Type for PermissionsResponse" + description: "List/Array of multiple permissions" + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/PermissionResponseRead" + x-speakeasy-component: true + OrganizationResponse: + title: "Root Type for OrganizationResponse" + description: "Provides details of a single organization for a user." 
+ type: "object" + required: + - "organizationId" + - "organizationName" + - "email" + properties: + organizationId: + $ref: "#/components/schemas/OrganizationId" + organizationName: + type: "string" + email: + type: "string" + format: "email" + x-speakeasy-param-suppress-computed-diff: true + x-speakeasy-component: true + OrganizationsResponse: + title: "Root Type for OrganizationsResponse" + description: "List/Array of multiple organizations." + required: + - "data" + type: "object" + properties: + data: + type: "array" + items: + $ref: "#/components/schemas/OrganizationResponse" + x-speakeasy-component: true + ConnectionsResponse: + title: "Root Type for ConnectionsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/ConnectionResponse" + default: [] + example: + next: "https://api.airbyte.com/v1/connections?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/connections?limit=5&offset=0" + data: + - name: "test-connection" + - connection_id: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + - sourceId: "49237019-645d-47d4-b45b-5eddf97775ce" + - destinationId: "al312fs-0ab1-4f72-9ed7-0b8fc27c5826" + - schedule: + scheduleType: "manual" + - status: "active" + - dataResidency: "auto" + x-speakeasy-component: true + SourcesResponse: + title: "Root Type for SourcesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/SourceResponse" + example: + next: "https://api.airbyte.com/v1/sources?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/sources?limit=5&offset=0" + data: + sourceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + sourceType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + 
DestinationsResponse: + title: "Root Type for DestinationsResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/DestinationResponse" + example: + next: "https://api.airbyte.com/v1/destinations?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/destinations?limit=5&offset=0" + data: + destinationId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Analytics Team Postgres" + destinationType: "postgres" + workspaceId: "871d9b60-11d1-44cb-8c92-c246d53bf87e" + x-speakeasy-component: true + WorkspacesResponse: + title: "Root Type for WorkspacesResponse" + description: "" + required: + - "data" + type: "object" + properties: + previous: + type: "string" + next: + type: "string" + data: + type: "array" + items: + $ref: "#/components/schemas/WorkspaceResponse" + example: + next: "https://api.airbyte.com/v1/workspaces?limit=5&offset=10" + previous: "https://api.airbyte.com/v1/workspaces?limit=5&offset=0" + data: + workspaceId: "18dccc91-0ab1-4f72-9ed7-0b8fc27c5826" + name: "Acme Company" + dataResidency: "auto" + x-speakeasy-component: true + StreamConfiguration: + description: "Configurations for a single stream." + type: "object" + required: + - "name" + properties: + name: + type: "string" + syncMode: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + cursorField: + description: + "Path to the field that will be used to determine if a record\ + \ is new or modified since the last sync. This field is REQUIRED if `sync_mode`\ + \ is `incremental` unless there is a default." + type: "array" + items: + type: "string" + primaryKey: + description: + "Paths to the fields that will be used as primary key. This\ + \ field is REQUIRED if `destination_sync_mode` is `*_dedup` unless it\ + \ is already supplied by the source schema." 
+ type: "array" + items: + type: "array" + items: + type: "string" + selectedFields: + description: + "By default (if not provided in the request) all fields will\ + \ be synced. Otherwise, only the fields in this list will be synced." + $ref: "#/components/schemas/SelectedFields" + x-speakeasy-component: true + StreamConfigurations: + description: "A list of configured stream options for a connection." + type: "object" + properties: + streams: + type: "array" + items: + $ref: "#/components/schemas/StreamConfiguration" + x-speakeasy-component: true + StreamPropertiesResponse: + description: "A list of stream properties." + type: "array" + items: + $ref: "#/components/schemas/StreamProperties" + x-speakeasy-component: true + StreamProperties: + description: "The stream properties associated with a connection." + type: "object" + properties: + streamName: + type: "string" + syncModes: + type: "array" + items: + $ref: "#/components/schemas/ConnectionSyncModeEnum" + defaultCursorField: + type: "array" + items: + type: "string" + sourceDefinedCursorField: + type: "boolean" + sourceDefinedPrimaryKey: + type: "array" + items: + type: "array" + items: + type: "string" + propertyFields: + type: "array" + items: + type: "array" + items: + type: "string" + x-speakeasy-component: true + ConnectionSyncModeEnum: + enum: + - "full_refresh_overwrite" + - "full_refresh_append" + - "incremental_append" + - "incremental_deduped_history" + x-speakeasy-component: true + ActorTypeEnum: + description: "Whether you're setting this override for a source or destination" + enum: + - "source" + - "destination" + x-speakeasy-component: true + source-trello: + type: "object" + required: + - "key" + - "token" + - "start_date" + - "sourceType" + properties: + key: + type: "string" + title: "API key" + description: + "Trello API key. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + token: + type: "string" + title: "API token" + description: + "Trello API token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 2 + board_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9a-fA-F]{24}$" + title: "Trello Board IDs" + description: + "IDs of the boards to replicate data from. If left empty, data\ + \ from all boards to which you have access will be replicated. Please\ + \ note that this is not the 8-character ID in the board's shortLink (URL\ + \ of the board). Rather, what is required here is the 24-character ID\ + \ usually returned by the API" + order: 3 + sourceType: + title: "trello" + const: "trello" + enum: + - "trello" + order: 0 + type: "string" + source-trello-update: + type: "object" + required: + - "key" + - "token" + - "start_date" + properties: + key: + type: "string" + title: "API key" + description: + "Trello API key. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 0 + token: + type: "string" + title: "API token" + description: + "Trello API token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 2 + board_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9a-fA-F]{24}$" + title: "Trello Board IDs" + description: + "IDs of the boards to replicate data from. If left empty, data\ + \ from all boards to which you have access will be replicated. Please\ + \ note that this is not the 8-character ID in the board's shortLink (URL\ + \ of the board). Rather, what is required here is the 24-character ID\ + \ usually returned by the API" + order: 3 + source-the-guardian-api: + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + description: + "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: + "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: + "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." 
+ examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: + "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: + "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + sourceType: + title: "the-guardian-api" + const: "the-guardian-api" + enum: + - "the-guardian-api" + order: 0 + type: "string" + source-the-guardian-api-update: + title: "The Guardian Api Spec" + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + description: + "Use this to set the minimum date (YYYY-MM-DD) of the results.\ + \ Results older than the start_date will not be shown." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + query: + title: "Query" + type: "string" + description: + "(Optional) The query (q) parameter filters the results to\ + \ only those that include that search term. The q parameter supports AND,\ + \ OR and NOT operators." + examples: + - "environment AND NOT water" + - "environment AND political" + - "amusement park" + - "political" + tag: + title: "Tag" + type: "string" + description: + "(Optional) A tag is a piece of data that is used by The Guardian\ + \ to categorise content. 
Use this parameter to filter results by showing\ + \ only the ones matching the entered tag. See here for a list of all tags, and here for the tags endpoint documentation." + examples: + - "environment/recycling" + - "environment/plasticbags" + - "environment/energyefficiency" + section: + title: "Section" + type: "string" + description: + "(Optional) Use this to filter the results by a particular\ + \ section. See here for a list of all sections, and here for the sections endpoint documentation." + examples: + - "media" + - "technology" + - "housing-network" + end_date: + title: "End Date" + type: "string" + description: + "(Optional) Use this to set the maximum date (YYYY-MM-DD) of\ + \ the results. Results newer than the end_date will not be shown. Default\ + \ is set to the current date (today) for incremental syncs." + pattern: "^([1-9][0-9]{3})\\-(0?[1-9]|1[012])\\-(0?[1-9]|[12][0-9]|3[01])$" + examples: + - "YYYY-MM-DD" + source-harvest: + title: "Harvest Spec" + type: "object" + required: + - "account_id" + - "replication_start_date" + - "sourceType" + properties: + account_id: + title: "Account ID" + description: + "Harvest account ID. Required for all Harvest requests in pair\ + \ with Personal Access Token" + airbyte_secret: true + type: "string" + order: 0 + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + order: 1 + format: "date-time" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." 
+ type: "object" + order: 3 + oneOf: + - type: "object" + title: "Authenticate via Harvest (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_token: + title: "Personal Access Token" + description: + "Log into Harvest and then create new personal access token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "harvest" + const: "harvest" + enum: + - "harvest" + order: 0 + type: "string" + source-harvest-update: + title: "Harvest Spec" + type: "object" + required: + - "account_id" + - "replication_start_date" + properties: + account_id: + title: "Account ID" + description: + "Harvest account ID. Required for all Harvest requests in pair\ + \ with Personal Access Token" + airbyte_secret: true + type: "string" + order: 0 + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + order: 1 + format: "date-time" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Harvest." + type: "object" + order: 3 + oneOf: + - type: "object" + title: "Authenticate via Harvest (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Harvest developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Harvest developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_token: + title: "Personal Access Token" + description: + "Log into Harvest and then create new personal access token." 
+ type: "string" + airbyte_secret: true + source-yotpo: + type: "object" + required: + - "access_token" + - "app_key" + - "start_date" + - "email" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\ + \ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)" + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + app_key: + type: "string" + description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)" + title: "App Key" + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." + title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + email: + type: "string" + description: "Email address registered with yotpo." + title: "Registered email address" + default: "example@gmail.com" + order: 3 + sourceType: + title: "yotpo" + const: "yotpo" + enum: + - "yotpo" + order: 0 + type: "string" + source-yotpo-update: + type: "object" + required: + - "access_token" + - "app_key" + - "start_date" + - "email" + properties: + access_token: + type: "string" + description: + "Access token recieved as a result of API call to https://api.yotpo.com/oauth/token\ + \ (Ref- https://apidocs.yotpo.com/reference/yotpo-authentication)" + title: "Access Token" + airbyte_secret: true + order: 0 + app_key: + type: "string" + description: "App key found at settings (Ref- https://settings.yotpo.com/#/general_settings)" + title: "App Key" + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." 
+ title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + email: + type: "string" + description: "Email address registered with yotpo." + title: "Registered email address" + default: "example@gmail.com" + order: 3 + source-prestashop: + title: "PrestaShop Spec" + type: "object" + required: + - "access_key" + - "url" + - "start_date" + - "sourceType" + properties: + access_key: + type: "string" + title: "Access Key" + description: + "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + format: "date" + order: 2 + sourceType: + title: "prestashop" + const: "prestashop" + enum: + - "prestashop" + order: 0 + type: "string" + source-prestashop-update: + title: "PrestaShop Spec" + type: "object" + required: + - "access_key" + - "url" + - "start_date" + properties: + access_key: + type: "string" + title: "Access Key" + description: + "Your PrestaShop access key. See the docs for info on how to obtain this." + order: 0 + airbyte_secret: true + url: + type: "string" + title: "Shop URL" + description: "Shop URL without trailing slash." + order: 1 + start_date: + type: "string" + title: "Start date" + description: "The Start date in the format YYYY-MM-DD." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-01" + format: "date" + order: 2 + source-netsuite: + title: "Netsuite Spec" + type: "object" + required: + - "realm" + - "consumer_key" + - "consumer_secret" + - "token_key" + - "token_secret" + - "start_datetime" + - "sourceType" + properties: + realm: + type: "string" + title: "Realm (Account Id)" + description: + "Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1,\ + \ as for the `sandbox`" + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + consumer_key: + type: "string" + title: "Consumer Key" + description: "Consumer key associated with your integration" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + consumer_secret: + type: "string" + title: "Consumer Secret" + description: "Consumer secret associated with your integration" + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_key: + type: "string" + title: "Token Key (Token Id)" + description: "Access token key" + order: 3 + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_secret: + type: "string" + title: "Token Secret" + description: "Access token secret" + order: 4 + airbyte_secret: true + x-speakeasy-param-sensitive: true + object_types: + type: "array" + title: "Object Types" + items: + type: "string" + description: + "The API names of the Netsuite objects you want to sync. Setting\ + \ this speeds up the connection setup process by limiting the number of\ + \ schemas that need to be retrieved from Netsuite." 
+ order: 5 + examples: + - "customer" + - "salesorder" + - "etc" + default: [] + start_datetime: + type: "string" + title: "Start Date" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DDTHH:mm:ssZ\"" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + window_in_days: + type: "integer" + title: "Window in Days" + description: + "The amount of days used to query the data with date chunks.\ + \ Set smaller value, if you have lots of data." + order: 7 + default: 30 + sourceType: + title: "netsuite" + const: "netsuite" + enum: + - "netsuite" + order: 0 + type: "string" + source-netsuite-update: + title: "Netsuite Spec" + type: "object" + required: + - "realm" + - "consumer_key" + - "consumer_secret" + - "token_key" + - "token_secret" + - "start_datetime" + properties: + realm: + type: "string" + title: "Realm (Account Id)" + description: + "Netsuite realm e.g. 2344535, as for `production` or 2344535_SB1,\ + \ as for the `sandbox`" + order: 0 + airbyte_secret: true + consumer_key: + type: "string" + title: "Consumer Key" + description: "Consumer key associated with your integration" + order: 1 + airbyte_secret: true + consumer_secret: + type: "string" + title: "Consumer Secret" + description: "Consumer secret associated with your integration" + order: 2 + airbyte_secret: true + token_key: + type: "string" + title: "Token Key (Token Id)" + description: "Access token key" + order: 3 + airbyte_secret: true + token_secret: + type: "string" + title: "Token Secret" + description: "Access token secret" + order: 4 + airbyte_secret: true + object_types: + type: "array" + title: "Object Types" + items: + type: "string" + description: + "The API names of the Netsuite objects you want to sync. Setting\ + \ this speeds up the connection setup process by limiting the number of\ + \ schemas that need to be retrieved from Netsuite." 
+ order: 5 + examples: + - "customer" + - "salesorder" + - "etc" + default: [] + start_datetime: + type: "string" + title: "Start Date" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DDTHH:mm:ssZ\"" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + window_in_days: + type: "integer" + title: "Window in Days" + description: + "The amount of days used to query the data with date chunks.\ + \ Set smaller value, if you have lots of data." + order: 7 + default: 30 + source-convex: + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + - "sourceType" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "convex" + const: "convex" + enum: + - "convex" + order: 0 + type: "string" + source-convex-update: + title: "Convex Source Spec" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + title: "Deployment Url" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + title: "Access Key" + description: "API access key used to retrieve data from Convex." + airbyte_secret: true + source-recurly: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recurly API Key. See the docs for more information on how to generate this key." 
+ order: 0 + x-speakeasy-param-sensitive: true + begin_time: + type: "string" + description: + "ISO8601 timestamp from which the replication from Recurly\ + \ API will start from." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 1 + end_time: + type: "string" + description: + "ISO8601 timestamp to which the replication from Recurly API\ + \ will stop. Records after that date won't be imported." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 2 + sourceType: + title: "recurly" + const: "recurly" + enum: + - "recurly" + order: 0 + type: "string" + source-recurly-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recurly API Key. See the docs for more information on how to generate this key." + order: 0 + begin_time: + type: "string" + description: + "ISO8601 timestamp from which the replication from Recurly\ + \ API will start from." + examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 1 + end_time: + type: "string" + description: + "ISO8601 timestamp to which the replication from Recurly API\ + \ will stop. Records after that date won't be imported." 
+ examples: + - "2021-12-01T00:00:00" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + order: 2 + source-pennylane: + type: "object" + required: + - "start_time" + - "api_key" + - "sourceType" + properties: + start_time: + type: "string" + order: 0 + title: "Start time" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "pennylane" + const: "pennylane" + enum: + - "pennylane" + order: 0 + type: "string" + source-pennylane-update: + type: "object" + required: + - "start_time" + - "api_key" + properties: + start_time: + type: "string" + order: 0 + title: "Start time" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + source-teamwork: + type: "object" + required: + - "username" + - "site_name" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + site_name: + type: "string" + description: "The teamwork site name appearing at the url" + order: 2 + title: "Site Name" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "teamwork" + const: "teamwork" + enum: + - "teamwork" + order: 0 + type: "string" + source-teamwork-update: + type: "object" + required: + - "username" + - "site_name" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + site_name: + type: "string" + description: "The teamwork 
site name appearing at the url" + order: 2 + title: "Site Name" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-zendesk-chat: + title: "Zendesk Chat Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Chat API, in the format YYYY-MM-DDT00:00:00Z." + examples: + - "2021-02-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + subdomain: + type: "string" + title: "Subdomain" + description: + "Required if you access Zendesk Chat from a Zendesk Support\ + \ subdomain." + default: "" + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "credentials" + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access Token" + required: + - "credentials" + - "access_token" + properties: + credentials: + type: "string" + const: "access_token" + order: 0 + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: "The Access Token to make authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zendesk-chat" + const: "zendesk-chat" + enum: + - "zendesk-chat" + order: 0 + type: "string" + source-zendesk-chat-update: + title: "Zendesk Chat Spec" + type: "object" + required: + - "start_date" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Chat API, in the format YYYY-MM-DDT00:00:00Z." + examples: + - "2021-02-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + subdomain: + type: "string" + title: "Subdomain" + description: + "Required if you access Zendesk Chat from a Zendesk Support\ + \ subdomain." + default: "" + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "credentials" + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." 
+ airbyte_secret: true + - type: "object" + title: "Access Token" + required: + - "credentials" + - "access_token" + properties: + credentials: + type: "string" + const: "access_token" + order: 0 + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: "The Access Token to make authenticated requests." + airbyte_secret: true + source-when-i-work: + type: "object" + required: + - "email" + - "password" + - "sourceType" + properties: + email: + type: "string" + description: "Email of your when-i-work account" + title: "Email" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + password: + type: "string" + description: "Password for your when-i-work account" + title: "Password" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "when-i-work" + const: "when-i-work" + enum: + - "when-i-work" + order: 0 + type: "string" + source-when-i-work-update: + type: "object" + required: + - "email" + - "password" + properties: + email: + type: "string" + description: "Email of your when-i-work account" + title: "Email" + airbyte_secret: true + order: 0 + password: + type: "string" + description: "Password for your when-i-work account" + title: "Password" + airbyte_secret: true + order: 1 + source-my-hours: + title: "My Hours Spec" + type: "object" + required: + - "email" + - "password" + - "start_date" + - "sourceType" + properties: + email: + title: "Email" + type: "string" + description: "Your My Hours username" + example: "john@doe.com" + password: + title: "Password" + type: "string" + description: "The password associated to the username" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: "Start date for collecting time logs" + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: "Time logs batch size" + description: "Pagination size used 
for retrieving logs in days" + examples: + - 30 + type: "integer" + minimum: 1 + maximum: 365 + default: 30 + sourceType: + title: "my-hours" + const: "my-hours" + enum: + - "my-hours" + order: 0 + type: "string" + source-my-hours-update: + title: "My Hours Spec" + type: "object" + required: + - "email" + - "password" + - "start_date" + properties: + email: + title: "Email" + type: "string" + description: "Your My Hours username" + example: "john@doe.com" + password: + title: "Password" + type: "string" + description: "The password associated to the username" + airbyte_secret: true + start_date: + title: "Start Date" + description: "Start date for collecting time logs" + examples: + - "%Y-%m-%d" + - "2016-01-01" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + logs_batch_size: + title: "Time logs batch size" + description: "Pagination size used for retrieving logs in days" + examples: + - 30 + type: "integer" + minimum: 1 + maximum: 365 + default: 30 + source-7shifts: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Access token to use for authentication. Generate it in the\ + \ 7shifts Developer Tools." + name: "access_token" + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "7shifts" + const: "7shifts" + enum: + - "7shifts" + order: 0 + type: "string" + source-7shifts-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Access token to use for authentication. Generate it in the\ + \ 7shifts Developer Tools." 
+ name: "access_token" + title: "Access Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-eventbrite: + type: "object" + required: + - "private_token" + - "start_date" + - "sourceType" + properties: + private_token: + type: "string" + description: "The private token to use for authenticating API requests." + name: "private_token" + order: 0 + title: "Private Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "eventbrite" + const: "eventbrite" + enum: + - "eventbrite" + order: 0 + type: "string" + source-eventbrite-update: + type: "object" + required: + - "private_token" + - "start_date" + properties: + private_token: + type: "string" + description: "The private token to use for authenticating API requests." + name: "private_token" + order: 0 + title: "Private Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-klaviyo: + title: "Klaviyo Spec" + type: "object" + properties: + api_key: + type: "string" + title: "Api Key" + description: + "Klaviyo API Key. See our docs if you need help finding this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This field is optional\ + \ - if not provided, all data will be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 1 + disable_fetching_predictive_analytics: + type: "boolean" + title: "Disable Fetching Predictive Analytics" + description: + "Certain streams like the profiles stream can retrieve predictive\ + \ analytics data from Klaviyo's API. However, at high volume, this can\ + \ lead to service availability issues on the API which can be improved\ + \ by not fetching this field. WARNING: Enabling this setting will stop\ + \ the \"predictive_analytics\" column from being populated in your downstream\ + \ destination." + order: 2 + sourceType: + title: "klaviyo" + const: "klaviyo" + enum: + - "klaviyo" + order: 0 + type: "string" + required: + - "api_key" + - "sourceType" + source-klaviyo-update: + title: "Klaviyo Spec" + type: "object" + properties: + api_key: + type: "string" + title: "Api Key" + description: + "Klaviyo API Key. See our docs if you need help finding this key." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This field is optional\ + \ - if not provided, all data will be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 1 + disable_fetching_predictive_analytics: + type: "boolean" + title: "Disable Fetching Predictive Analytics" + description: + "Certain streams like the profiles stream can retrieve predictive\ + \ analytics data from Klaviyo's API. However, at high volume, this can\ + \ lead to service availability issues on the API which can be improved\ + \ by not fetching this field. WARNING: Enabling this setting will stop\ + \ the \"predictive_analytics\" column from being populated in your downstream\ + \ destination." 
+ order: 2 + required: + - "api_key" + source-datadog: + type: "object" + required: + - "api_key" + - "application_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Datadog API key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + application_key: + type: "string" + description: "Datadog application key" + order: 1 + title: "Application Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + description: + "The search query. This just applies to Incremental syncs.\ + \ If empty, it'll collect all logs." + order: 2 + title: "Query" + start_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This just applies to Incremental\ + \ syncs." + order: 3 + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2023-12-01T00:00:00Z" + site: + type: "string" + description: "The site where Datadog data resides in." + enum: + - "datadoghq.com" + - "us3.datadoghq.com" + - "us5.datadoghq.com" + - "datadoghq.eu" + - "ddog-gov.com" + order: 4 + title: "Site" + default: "datadoghq.com" + end_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ + \ after this date will not be replicated. An empty value will represent\ + \ the current datetime for each execution. This just applies to Incremental\ + \ syncs." + order: 5 + title: "End date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2024-01-01T00:00:00Z" + max_records_per_request: + type: "integer" + description: "Maximum number of records to collect per request." 
+ order: 6 + title: "Max records per requests" + default: 5000 + maximum: 5000 + minimum: 1 + queries: + type: "array" + description: "List of queries to be run and used as inputs." + items: + type: "object" + required: + - "name" + - "data_source" + - "query" + properties: + name: + type: "string" + description: "The variable name for use in queries." + order: 1 + title: "Query Name" + query: + type: "string" + description: "A classic query string." + order: 3 + title: "Query" + data_source: + type: "string" + description: "A data source that is powered by the platform." + enum: + - "metrics" + - "cloud_cost" + - "logs" + - "rum" + order: 2 + title: "Data Source" + order: 7 + title: "Queries" + default: [] + sourceType: + title: "datadog" + const: "datadog" + enum: + - "datadog" + order: 0 + type: "string" + source-datadog-update: + type: "object" + required: + - "api_key" + - "application_key" + properties: + api_key: + type: "string" + description: "Datadog API key" + order: 0 + title: "API Key" + airbyte_secret: true + application_key: + type: "string" + description: "Datadog application key" + order: 1 + title: "Application Key" + airbyte_secret: true + query: + type: "string" + description: + "The search query. This just applies to Incremental syncs.\ + \ If empty, it'll collect all logs." + order: 2 + title: "Query" + start_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. This just applies to Incremental\ + \ syncs." + order: 3 + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2023-12-01T00:00:00Z" + site: + type: "string" + description: "The site where Datadog data resides in." 
+ enum: + - "datadoghq.com" + - "us3.datadoghq.com" + - "us5.datadoghq.com" + - "datadoghq.eu" + - "ddog-gov.com" + order: 4 + title: "Site" + default: "datadoghq.com" + end_date: + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ + \ after this date will not be replicated. An empty value will represent\ + \ the current datetime for each execution. This just applies to Incremental\ + \ syncs." + order: 5 + title: "End date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-10-01T00:00:00Z" + default: "2024-01-01T00:00:00Z" + max_records_per_request: + type: "integer" + description: "Maximum number of records to collect per request." + order: 6 + title: "Max records per requests" + default: 5000 + maximum: 5000 + minimum: 1 + queries: + type: "array" + description: "List of queries to be run and used as inputs." + items: + type: "object" + required: + - "name" + - "data_source" + - "query" + properties: + name: + type: "string" + description: "The variable name for use in queries." + order: 1 + title: "Query Name" + query: + type: "string" + description: "A classic query string." + order: 3 + title: "Query" + data_source: + type: "string" + description: "A data source that is powered by the platform." + enum: + - "metrics" + - "cloud_cost" + - "logs" + - "rum" + order: 2 + title: "Data Source" + order: 7 + title: "Queries" + default: [] + source-luma: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "Get your API key on lu.ma Calendars dashboard → Settings." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "luma" + const: "luma" + enum: + - "luma" + order: 0 + type: "string" + source-luma-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "Get your API key on lu.ma Calendars dashboard → Settings." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-dockerhub: + type: "object" + required: + - "docker_username" + - "sourceType" + properties: + docker_username: + type: "string" + order: 0 + title: "Docker Username" + description: + "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + sourceType: + title: "dockerhub" + const: "dockerhub" + enum: + - "dockerhub" + order: 0 + type: "string" + source-dockerhub-update: + type: "object" + required: + - "docker_username" + properties: + docker_username: + type: "string" + order: 0 + title: "Docker Username" + description: + "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ + \ API call)" + pattern: "^[a-z0-9_\\-]+$" + examples: + - "airbyte" + source-webflow: + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + - "sourceType" + properties: + site_id: + title: "Site id" + type: "string" + description: + "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. 
See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + accept_version: + title: "Accept Version" + type: "string" + description: "The version of the Webflow API to use. See https://developers.webflow.com/#versioning" + example: "1.0.0" + order: 2 + sourceType: + title: "webflow" + const: "webflow" + enum: + - "webflow" + order: 0 + type: "string" + source-webflow-update: + title: "Webflow Spec" + type: "object" + required: + - "api_key" + - "site_id" + properties: + site_id: + title: "Site id" + type: "string" + description: + "The id of the Webflow site you are requesting data from. See\ + \ https://developers.webflow.com/#sites" + example: "a relatively long hex sequence" + order: 0 + api_key: + title: "API token" + type: "string" + description: "The API token for authenticating to Webflow. See https://university.webflow.com/lesson/intro-to-the-webflow-api" + example: "a very long hex sequence" + order: 1 + airbyte_secret: true + accept_version: + title: "Accept Version" + type: "string" + description: "The version of the Webflow API to use. 
See https://developers.webflow.com/#versioning" + example: "1.0.0" + order: 2 + source-scryfall: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "scryfall" + const: "scryfall" + enum: + - "scryfall" + order: 0 + type: "string" + source-scryfall-update: + type: "object" + required: [] + properties: {} + source-beamer: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "beamer" + const: "beamer" + enum: + - "beamer" + order: 0 + type: "string" + source-beamer-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-high-level: + type: "object" + required: + - "location_id" + - "api_key" + - "start_date" + - "sourceType" + properties: + location_id: + type: "string" + order: 0 + title: "Location ID" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "high-level" + const: "high-level" + enum: + - "high-level" + order: 0 + type: "string" + source-high-level-update: + type: "object" + required: + - "location_id" + - "api_key" + - "start_date" + properties: + location_id: + type: "string" + order: 0 + title: "Location ID" + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true 
+ start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-wikipedia-pageviews: + type: "object" + required: + - "access" + - "agent" + - "article" + - "country" + - "end" + - "project" + - "start" + - "sourceType" + properties: + access: + type: "string" + title: "Access" + description: + "If you want to filter by access method, use one of desktop,\ + \ mobile-app or mobile-web. If you are interested in pageviews regardless\ + \ of access method, use all-access." + examples: + - "all-access" + - "desktop" + - "mobile-app" + - "mobile-web" + order: 0 + agent: + type: "string" + title: "Agent" + description: + "If you want to filter by agent type, use one of user, automated\ + \ or spider. If you are interested in pageviews regardless of agent type,\ + \ use all-agents." + examples: + - "all-agents" + - "user" + - "spider" + - "automated" + order: 1 + article: + type: "string" + title: "Article" + description: + "The title of any article in the specified project. Any spaces\ + \ should be replaced with underscores. It also should be URI-encoded,\ + \ so that non-URI-safe characters like %, / or ? are accepted." + examples: + - "Are_You_the_One%3F" + order: 2 + country: + type: "string" + title: "Country" + description: + "The ISO 3166-1 alpha-2 code of a country for which to retrieve\ + \ top articles." + examples: + - "FR" + - "IN" + order: 3 + end: + type: "string" + title: "End" + description: + "The date of the last day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format." + order: 4 + project: + type: "string" + title: "Project" + description: + "If you want to filter by project, use the domain of any Wikimedia\ + \ project." + examples: + - "en.wikipedia.org" + - "www.mediawiki.org" + - "commons.wikimedia.org" + order: 5 + start: + type: "string" + title: "Start" + description: + "The date of the first day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format. 
Also serves as the date to retrieve data for the top articles." + order: 6 + sourceType: + title: "wikipedia-pageviews" + const: "wikipedia-pageviews" + enum: + - "wikipedia-pageviews" + order: 0 + type: "string" + source-wikipedia-pageviews-update: + type: "object" + required: + - "access" + - "agent" + - "article" + - "country" + - "end" + - "project" + - "start" + properties: + access: + type: "string" + title: "Access" + description: + "If you want to filter by access method, use one of desktop,\ + \ mobile-app or mobile-web. If you are interested in pageviews regardless\ + \ of access method, use all-access." + examples: + - "all-access" + - "desktop" + - "mobile-app" + - "mobile-web" + order: 0 + agent: + type: "string" + title: "Agent" + description: + "If you want to filter by agent type, use one of user, automated\ + \ or spider. If you are interested in pageviews regardless of agent type,\ + \ use all-agents." + examples: + - "all-agents" + - "user" + - "spider" + - "automated" + order: 1 + article: + type: "string" + title: "Article" + description: + "The title of any article in the specified project. Any spaces\ + \ should be replaced with underscores. It also should be URI-encoded,\ + \ so that non-URI-safe characters like %, / or ? are accepted." + examples: + - "Are_You_the_One%3F" + order: 2 + country: + type: "string" + title: "Country" + description: + "The ISO 3166-1 alpha-2 code of a country for which to retrieve\ + \ top articles." + examples: + - "FR" + - "IN" + order: 3 + end: + type: "string" + title: "End" + description: + "The date of the last day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format." + order: 4 + project: + type: "string" + title: "Project" + description: + "If you want to filter by project, use the domain of any Wikimedia\ + \ project." 
+ examples: + - "en.wikipedia.org" + - "www.mediawiki.org" + - "commons.wikimedia.org" + order: 5 + start: + type: "string" + title: "Start" + description: + "The date of the first day to include, in YYYYMMDD or YYYYMMDDHH\ + \ format. Also serves as the date to retrieve data for the top articles." + order: 6 + source-google-directory: + title: "Google Directory Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Google Credentials" + description: + "Google APIs use the OAuth 2.0 protocol for authentication\ + \ and authorization. The Source supports Web server application and Service accounts scenarios." + type: "object" + oneOf: + - title: "Sign in via Google (OAuth)" + description: + "For these scenario user only needs to give permission to\ + \ read Google Directory data." + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Web server app" + order: 0 + enum: + - "Web server app" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of the developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client secret" + type: "string" + description: "The Client Secret of the developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The Token for obtaining a new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Service Account Key" + description: + "For these scenario user should obtain service account's\ + \ credentials from the Google API Console and provide delegated email." 
+ type: "object" + required: + - "credentials_json" + - "email" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Service accounts" + order: 0 + enum: + - "Service accounts" + credentials_json: + type: "string" + title: "Credentials JSON" + description: + "The contents of the JSON service account key. See the\ + \ docs for more information on how to generate this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + type: "string" + title: "Email" + description: + "The email of the user, which has permissions to access\ + \ the Google Workspace Admin APIs." + sourceType: + title: "google-directory" + const: "google-directory" + enum: + - "google-directory" + order: 0 + type: "string" + source-google-directory-update: + title: "Google Directory Spec" + type: "object" + required: [] + properties: + credentials: + title: "Google Credentials" + description: + "Google APIs use the OAuth 2.0 protocol for authentication\ + \ and authorization. The Source supports Web server application and Service accounts scenarios." + type: "object" + oneOf: + - title: "Sign in via Google (OAuth)" + description: + "For these scenario user only needs to give permission to\ + \ read Google Directory data." + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Web server app" + order: 0 + enum: + - "Web server app" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of the developer application." + airbyte_secret: true + client_secret: + title: "Client secret" + type: "string" + description: "The Client Secret of the developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The Token for obtaining a new access token." 
+ airbyte_secret: true + - title: "Service Account Key" + description: + "For these scenario user should obtain service account's\ + \ credentials from the Google API Console and provide delegated email." + type: "object" + required: + - "credentials_json" + - "email" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Authentication Scenario" + const: "Service accounts" + order: 0 + enum: + - "Service accounts" + credentials_json: + type: "string" + title: "Credentials JSON" + description: + "The contents of the JSON service account key. See the\ + \ docs for more information on how to generate this key." + airbyte_secret: true + email: + type: "string" + title: "Email" + description: + "The email of the user, which has permissions to access\ + \ the Google Workspace Admin APIs." + source-smartengage: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "smartengage" + const: "smartengage" + enum: + - "smartengage" + order: 0 + type: "string" + source-smartengage-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + source-outbrain-amplify: + title: "Outbrain Amplify Spec" + type: "object" + required: + - "credentials" + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authentication Method" + description: + "Credentials for making authenticated requests requires either\ + \ username/password or access_token." + default: {} + order: 0 + type: "object" + oneOf: + - title: "Access token" + type: "object" + properties: + type: + title: "Access token is required for authentication requests." 
+ const: "access_token" + type: "string" + enum: + - "access_token" + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "type" + - "access_token" + - title: "Username Password" + type: "object" + properties: + type: + title: + "Both username and password is required for authentication\ + \ request." + const: "username_password" + type: "string" + enum: + - "username_password" + username: + type: "string" + description: "Add Username for authentication." + password: + type: "string" + description: "Add Password for authentication." + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "type" + - "username" + - "password" + report_granularity: + title: "Granularity for periodic reports." + description: + "The granularity used for periodic data in reports. See the docs." + enum: + - "daily" + - "weekly" + - "monthly" + order: 1 + type: "string" + geo_location_breakdown: + title: "Granularity for geo-location region." + description: "The granularity used for geo location data in reports." + enum: + - "country" + - "region" + - "subregion" + order: 2 + type: "string" + start_date: + type: "string" + order: 3 + description: + "Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + order: 4 + description: "Date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + sourceType: + title: "outbrain-amplify" + const: "outbrain-amplify" + enum: + - "outbrain-amplify" + order: 0 + type: "string" + source-outbrain-amplify-update: + title: "Outbrain Amplify Spec" + type: "object" + required: + - "credentials" + - "start_date" + properties: + credentials: + title: "Authentication Method" + description: + "Credentials for making authenticated requests requires either\ + \ username/password or access_token." 
+ default: {} + order: 0 + type: "object" + oneOf: + - title: "Access token" + type: "object" + properties: + type: + title: "Access token is required for authentication requests." + const: "access_token" + type: "string" + enum: + - "access_token" + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + required: + - "type" + - "access_token" + - title: "Username Password" + type: "object" + properties: + type: + title: + "Both username and password is required for authentication\ + \ request." + const: "username_password" + type: "string" + enum: + - "username_password" + username: + type: "string" + description: "Add Username for authentication." + password: + type: "string" + description: "Add Password for authentication." + airbyte_secret: true + required: + - "type" + - "username" + - "password" + report_granularity: + title: "Granularity for periodic reports." + description: + "The granularity used for periodic data in reports. See the docs." + enum: + - "daily" + - "weekly" + - "monthly" + order: 1 + type: "string" + geo_location_breakdown: + title: "Granularity for geo-location region." + description: "The granularity used for geo location data in reports." + enum: + - "country" + - "region" + - "subregion" + order: 2 + type: "string" + start_date: + type: "string" + order: 3 + description: + "Date in the format YYYY-MM-DD eg. 2017-01-25. Any data before\ + \ this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + order: 4 + description: "Date in the format YYYY-MM-DD." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + source-k6-cloud: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + title: "Api Token" + description: + "Your API Token. See here. The key is case sensitive." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "k6-cloud" + const: "k6-cloud" + enum: + - "k6-cloud" + order: 0 + type: "string" + source-k6-cloud-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + title: "Api Token" + description: + "Your API Token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + source-postgres: + title: "Postgres Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "sourceType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + group: "db" + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + group: "db" + database: + title: "Database Name" + description: "Name of the database." + type: "string" + order: 2 + group: "db" + schemas: + title: "Schemas" + description: + "The list of schemas (case sensitive) to sync from. Defaults\ + \ to public." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "public" + order: 3 + group: "db" + username: + title: "Username" + description: "Username to access the database." + type: "string" + order: 4 + group: "auth" + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + group: "auth" + always_show: true + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." 
+ title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 6 + group: "advanced" + pattern_descriptor: "key1=value1&key2=value2" + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n Read more in the docs." + type: "object" + order: 8 + group: "security" + oneOf: + - title: "disable" + additionalProperties: true + description: + "Disables encryption of communication between Airbyte and\ + \ source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + order: 0 + enum: + - "disable" + - title: "allow" + additionalProperties: true + description: "Enables encryption only when required by the source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + order: 0 + enum: + - "allow" + - title: "prefer" + additionalProperties: true + description: + "Allows unencrypted connection only if the source database\ + \ does not support encryption." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + order: 0 + enum: + - "prefer" + - title: "require" + additionalProperties: true + description: + "Always require encryption. If the source database server\ + \ does not support encryption, connection will fail." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + order: 0 + enum: + - "require" + - title: "verify-ca" + additionalProperties: true + description: + "Always require encryption and verifies that the source database\ + \ server has a valid SSL certificate." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + order: 0 + enum: + - "verify-ca" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: true + description: + "This is the most secure mode. Always require encryption\ + \ and verifies the identity of the source database server." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + order: 0 + enum: + - "verify-full" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 9 + group: "advanced" + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Write-Ahead Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." + required: + - "method" + - "replication_slot" + - "publication" + additionalProperties: true + properties: + method: + type: "string" + const: "CDC" + order: 1 + enum: + - "CDC" + plugin: + type: "string" + title: "Plugin" + description: + "A logical decoding plugin installed on the PostgreSQL\ + \ server." + enum: + - "pgoutput" + default: "pgoutput" + order: 2 + replication_slot: + type: "string" + title: "Replication Slot" + description: + "A plugin logical replication slot. 
Read about replication slots." + order: 3 + publication: + type: "string" + title: "Publication" + description: + "A Postgres publication used for consuming changes. Read\ + \ about publications and replication identities." + order: 4 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 1200 seconds. Valid range: 120 seconds to 2400 seconds. Read about\ + \ initial waiting time." + default: 1200 + order: 5 + min: 120 + max: 2400 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 6 + min: 1000 + max: 10000 + lsn_commit_behaviour: + type: "string" + title: "LSN commit behaviour" + description: + "Determines when Airbyte should flush the LSN of processed\ + \ WAL logs in the source database. `After loading Data in the destination`\ + \ is default. If `While reading Data` is selected, in case of a\ + \ downstream failure (while loading data into the destination),\ + \ next sync would result in a full sync." + enum: + - "While reading Data" + - "After loading Data in the destination" + default: "After loading Data in the destination" + order: 7 + heartbeat_action_query: + type: "string" + title: "Debezium heartbeat query (Advanced)" + description: + "Specifies a query that the connector executes on the\ + \ source database when the connector sends a heartbeat message.\ + \ Please see the setup guide for how and when to configure this setting." 
+ default: "" + order: 8 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 9 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 10 + - title: "Detect Changes with Xmin System Column" + description: + "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. Suitable for databases that have low transaction\ + \ pressure." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + order: 8 + enum: + - "Standard" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + group: "security" + sourceType: + title: "postgres" + const: "postgres" + enum: + - "postgres" + order: 0 + type: "string" + groups: + - id: "db" + - id: "auth" + - id: "security" + title: "Security" + - id: "advanced" + title: "Advanced" + source-postgres-update: + title: "Postgres Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + group: "db" + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + group: "db" + database: + title: "Database Name" + description: "Name of the database." + type: "string" + order: 2 + group: "db" + schemas: + title: "Schemas" + description: + "The list of schemas (case sensitive) to sync from. Defaults\ + \ to public." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "public" + order: 3 + group: "db" + username: + title: "Username" + description: "Username to access the database." + type: "string" + order: 4 + group: "auth" + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 5 + group: "auth" + always_show: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 6 + group: "advanced" + pattern_descriptor: "key1=value1&key2=value2" + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n Read more in the docs." + type: "object" + order: 8 + group: "security" + oneOf: + - title: "disable" + additionalProperties: true + description: + "Disables encryption of communication between Airbyte and\ + \ source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + order: 0 + enum: + - "disable" + - title: "allow" + additionalProperties: true + description: "Enables encryption only when required by the source database." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + order: 0 + enum: + - "allow" + - title: "prefer" + additionalProperties: true + description: + "Allows unencrypted connection only if the source database\ + \ does not support encryption." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + order: 0 + enum: + - "prefer" + - title: "require" + additionalProperties: true + description: + "Always require encryption. If the source database server\ + \ does not support encryption, connection will fail." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + order: 0 + enum: + - "require" + - title: "verify-ca" + additionalProperties: true + description: + "Always require encryption and verifies that the source database\ + \ server has a valid SSL certificate." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + order: 0 + enum: + - "verify-ca" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: true + description: + "This is the most secure mode. Always require encryption\ + \ and verifies the identity of the source database server." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + order: 0 + enum: + - "verify-full" + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." 
+ airbyte_secret: true + order: 4 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 9 + group: "advanced" + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Write-Ahead Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." + required: + - "method" + - "replication_slot" + - "publication" + additionalProperties: true + properties: + method: + type: "string" + const: "CDC" + order: 1 + enum: + - "CDC" + plugin: + type: "string" + title: "Plugin" + description: + "A logical decoding plugin installed on the PostgreSQL\ + \ server." + enum: + - "pgoutput" + default: "pgoutput" + order: 2 + replication_slot: + type: "string" + title: "Replication Slot" + description: + "A plugin logical replication slot. Read about replication slots." + order: 3 + publication: + type: "string" + title: "Publication" + description: + "A Postgres publication used for consuming changes. Read\ + \ about publications and replication identities." + order: 4 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 1200 seconds. Valid range: 120 seconds to 2400 seconds. Read about\ + \ initial waiting time." + default: 1200 + order: 5 + min: 120 + max: 2400 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." 
+ default: 10000 + order: 6 + min: 1000 + max: 10000 + lsn_commit_behaviour: + type: "string" + title: "LSN commit behaviour" + description: + "Determines when Airbyte should flush the LSN of processed\ + \ WAL logs in the source database. `After loading Data in the destination`\ + \ is default. If `While reading Data` is selected, in case of a\ + \ downstream failure (while loading data into the destination),\ + \ next sync would result in a full sync." + enum: + - "While reading Data" + - "After loading Data in the destination" + default: "After loading Data in the destination" + order: 7 + heartbeat_action_query: + type: "string" + title: "Debezium heartbeat query (Advanced)" + description: + "Specifies a query that the connector executes on the\ + \ source database when the connector sends a heartbeat message.\ + \ Please see the setup guide for how and when to configure this setting." + default: "" + order: 8 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 9 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 10 + - title: "Detect Changes with Xmin System Column" + description: + "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. 
Suitable for databases that have low transaction\ + \ pressure." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "Standard" + order: 8 + enum: + - "Standard" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + group: "security" + groups: + - id: "db" + - id: "auth" + - id: "security" + title: "Security" + - id: "advanced" + title: "Advanced" + source-buildkite: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "buildkite" + const: "buildkite" + enum: + - "buildkite" + order: 0 + type: "string" 
+ source-buildkite-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-fauna: + title: "Fauna Spec" + type: "object" + required: + - "domain" + - "port" + - "scheme" + - "secret" + - "sourceType" + properties: + domain: + order: 0 + type: "string" + title: "Domain" + description: + "Domain of Fauna to query. Defaults db.fauna.com. See the\ + \ docs." + default: "db.fauna.com" + port: + order: 1 + type: "integer" + title: "Port" + description: "Endpoint port." + default: 443 + scheme: + order: 2 + type: "string" + title: "Scheme" + description: "URL scheme." + default: "https" + secret: + order: 3 + type: "string" + title: "Fauna Secret" + description: "Fauna secret, used when authenticating with the database." + airbyte_secret: true + x-speakeasy-param-sensitive: true + collection: + order: 5 + type: "object" + title: "Collection" + description: "Settings for the Fauna Collection." + required: + - "page_size" + - "deletions" + properties: + page_size: + order: 4 + type: "integer" + title: "Page Size" + default: 64 + description: + "The page size used when reading documents from the database.\ + \ The larger the page size, the faster the connector processes documents.\ + \ However, if a page is too large, the connector may fail.
    \n\ + Choose your page size based on how large the documents are.
    \n\ + See the docs." + deletions: + order: 5 + type: "object" + title: "Deletion Mode" + description: + "This only applies to incremental syncs.
    \n\ + Enabling deletion mode informs your destination of deleted documents.
    \n\ + Disabled - Leave this feature disabled, and ignore deleted documents.
    \n\ + Enabled - Enables this feature. When a document is deleted, the connector\ + \ exports a record with a \"deleted at\" column containing the time\ + \ that the document was deleted." + oneOf: + - title: "Disabled" + type: "object" + order: 0 + required: + - "deletion_mode" + properties: + deletion_mode: + type: "string" + const: "ignore" + enum: + - "ignore" + - title: "Enabled" + type: "object" + order: 1 + required: + - "deletion_mode" + - "column" + properties: + deletion_mode: + type: "string" + const: "deleted_field" + enum: + - "deleted_field" + column: + type: "string" + title: "Deleted At Column" + description: 'Name of the "deleted at" column.' + default: "deleted_at" + sourceType: + title: "fauna" + const: "fauna" + enum: + - "fauna" + order: 0 + type: "string" + source-fauna-update: + title: "Fauna Spec" + type: "object" + required: + - "domain" + - "port" + - "scheme" + - "secret" + properties: + domain: + order: 0 + type: "string" + title: "Domain" + description: + "Domain of Fauna to query. Defaults db.fauna.com. See the\ + \ docs." + default: "db.fauna.com" + port: + order: 1 + type: "integer" + title: "Port" + description: "Endpoint port." + default: 443 + scheme: + order: 2 + type: "string" + title: "Scheme" + description: "URL scheme." + default: "https" + secret: + order: 3 + type: "string" + title: "Fauna Secret" + description: "Fauna secret, used when authenticating with the database." + airbyte_secret: true + collection: + order: 5 + type: "object" + title: "Collection" + description: "Settings for the Fauna Collection." + required: + - "page_size" + - "deletions" + properties: + page_size: + order: 4 + type: "integer" + title: "Page Size" + default: 64 + description: + "The page size used when reading documents from the database.\ + \ The larger the page size, the faster the connector processes documents.\ + \ However, if a page is too large, the connector may fail.
    \n\ + Choose your page size based on how large the documents are.
    \n\ + See the docs." + deletions: + order: 5 + type: "object" + title: "Deletion Mode" + description: + "This only applies to incremental syncs.
    \n\ + Enabling deletion mode informs your destination of deleted documents.
    \n\ + Disabled - Leave this feature disabled, and ignore deleted documents.
    \n\ + Enabled - Enables this feature. When a document is deleted, the connector\ + \ exports a record with a \"deleted at\" column containing the time\ + \ that the document was deleted." + oneOf: + - title: "Disabled" + type: "object" + order: 0 + required: + - "deletion_mode" + properties: + deletion_mode: + type: "string" + const: "ignore" + enum: + - "ignore" + - title: "Enabled" + type: "object" + order: 1 + required: + - "deletion_mode" + - "column" + properties: + deletion_mode: + type: "string" + const: "deleted_field" + enum: + - "deleted_field" + column: + type: "string" + title: "Deleted At Column" + description: 'Name of the "deleted at" column.' + default: "deleted_at" + source-twilio: + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + - "sourceType" + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + x-speakeasy-param-sensitive: true + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + x-speakeasy-param-sensitive: true + start_date: + title: "Replication Start Date" + description: + "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + format: "date-time" + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. 
(in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + sourceType: + title: "twilio" + const: "twilio" + enum: + - "twilio" + order: 0 + type: "string" + source-twilio-update: + title: "Twilio Spec" + type: "object" + required: + - "account_sid" + - "auth_token" + - "start_date" + properties: + account_sid: + title: "Account ID" + description: "Twilio account SID" + airbyte_secret: true + type: "string" + order: 1 + auth_token: + title: "Auth Token" + description: "Twilio Auth Token." + airbyte_secret: true + type: "string" + order: 2 + start_date: + title: "Replication Start Date" + description: + "UTC date and time in the format 2020-10-01T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-10-01T00:00:00Z" + type: "string" + order: 3 + format: "date-time" + lookback_window: + title: "Lookback window" + description: "How far into the past to look for records. (in minutes)" + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 4 + source-sendgrid: + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 0 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + description: + "Sendgrid API Key, use admin to generate this key." 
+ x-speakeasy-param-sensitive: true + sourceType: + title: "sendgrid" + const: "sendgrid" + enum: + - "sendgrid" + order: 0 + type: "string" + source-sendgrid-update: + type: "object" + required: + - "start_date" + - "api_key" + properties: + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 0 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 1 + description: + "Sendgrid API Key, use admin to generate this key." + source-gnews: + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + order: 1 + title: "Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. 
For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + language: + type: "string" + title: "Language" + decription: + "This parameter allows you to specify the language of the news\ + \ articles returned by the API. You have to set as value the 2 letters\ + \ code of the language you want to filter." + order: 2 + enum: + - "ar" + - "zh" + - "nl" + - "en" + - "fr" + - "de" + - "el" + - "he" + - "hi" + - "it" + - "ja" + - "ml" + - "mr" + - "no" + - "pt" + - "ro" + - "ru" + - "es" + - "sv" + - "ta" + - "te" + - "uk" + country: + type: "string" + title: "Country" + description: + "This parameter allows you to specify the country where the\ + \ news articles returned by the API were published, the contents of the\ + \ articles are not necessarily related to the specified country. You have\ + \ to set as value the 2 letters code of the country you want to filter." 
+ order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: + "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: + "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 7 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: + "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 8 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 9 + title: "Top Headlines Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." + order: 10 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + sourceType: + title: "gnews" + const: "gnews" + enum: + - "gnews" + order: 0 + type: "string" + source-gnews-update: + title: "Gnews Spec" + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + order: 0 + airbyte_secret: true + query: + type: "string" + order: 1 + title: "Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. 
For example the query: Apple Microsoft is\
+ \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\
+ \ allows you to retrieve articles matching the keyword a or the keyword\
+ \ b.\n It is important to note that this operator has a higher precedence\
+ \ than the AND operator. For example the \n query: Apple OR Microsoft\
+ \ will return all articles matching the keyword Apple as well as all articles\
+ \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\
+ \ allows you to remove from the results the articles corresponding to\
+ \ the\n specified keywords. To use it, you need to add NOT in front of\
+ \ each word or phrase surrounded by quotes.\n For example the query:\
+ \ Apple NOT iPhone will return all articles matching the keyword Apple\
+ \ but not the keyword\n iPhone"
+ examples:
+ - "Microsoft Windows 10"
+ - "Apple OR Microsoft"
+ - "Apple AND NOT iPhone"
+ - "(Windows 7) AND (Windows 10)"
+ - "Intel AND (i7 OR i9)"
+ language:
+ type: "string"
+ title: "Language"
+ description:
+ "This parameter allows you to specify the language of the news\
+ \ articles returned by the API. You have to set as value the 2 letters\
+ \ code of the language you want to filter."
+ order: 2
+ enum:
+ - "ar"
+ - "zh"
+ - "nl"
+ - "en"
+ - "fr"
+ - "de"
+ - "el"
+ - "he"
+ - "hi"
+ - "it"
+ - "ja"
+ - "ml"
+ - "mr"
+ - "no"
+ - "pt"
+ - "ro"
+ - "ru"
+ - "es"
+ - "sv"
+ - "ta"
+ - "te"
+ - "uk"
+ country:
+ type: "string"
+ title: "Country"
+ description:
+ "This parameter allows you to specify the country where the\
+ \ news articles returned by the API were published, the contents of the\
+ \ articles are not necessarily related to the specified country. You have\
+ \ to set as value the 2 letters code of the country you want to filter."
+ order: 3 + enum: + - "au" + - "br" + - "ca" + - "cn" + - "eg" + - "fr" + - "de" + - "gr" + - "hk" + - "in" + - "ie" + - "il" + - "it" + - "jp" + - "nl" + - "no" + - "pk" + - "pe" + - "ph" + - "pt" + - "ro" + - "ru" + - "sg" + - "es" + - "se" + - "ch" + - "tw" + - "ua" + - "gb" + - "us" + in: + type: "array" + title: "In" + description: + "This parameter allows you to choose in which attributes the\ + \ keywords are searched. The attributes that can be set are title, description\ + \ and content. It is possible to combine several attributes." + order: 4 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + nullable: + type: "array" + title: "Nullable" + description: + "This parameter allows you to specify the attributes that you\ + \ allow to return null values. The attributes that can be set are title,\ + \ description and content. It is possible to combine several attributes" + order: 5 + items: + type: "string" + enum: + - "title" + - "description" + - "content" + start_date: + type: "string" + title: "Start Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date greater than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 6 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + end_date: + type: "string" + title: "End Date" + description: + "This parameter allows you to filter the articles that have\ + \ a publication date smaller than or equal to the specified value. The\ + \ date must respect the following format: YYYY-MM-DD hh:mm:ss (in UTC)" + order: 7 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}$" + examples: + - "2022-08-21 16:27:09" + sortby: + type: "string" + title: "Sort By" + description: + "This parameter allows you to choose with which type of sorting\ + \ the articles should be returned. 
Two values are possible:\n - publishedAt\ + \ = sort by publication date, the articles with the most recent publication\ + \ date are returned first\n - relevance = sort by best match to keywords,\ + \ the articles with the best match are returned first" + order: 8 + enum: + - "publishedAt" + - "relevance" + top_headlines_query: + type: "string" + order: 9 + title: "Top Headlines Query" + description: + "This parameter allows you to specify your search keywords\ + \ to find the news articles you are looking for. The keywords will be\ + \ used to return the most relevant articles. It is possible to use logical\ + \ operators with keywords. - Phrase Search Operator: This operator allows\ + \ you to make an exact search. Keywords surrounded by \n quotation marks\ + \ are used to search for articles with the exact same keyword sequence.\ + \ \n For example the query: \"Apple iPhone\" will return articles matching\ + \ at least once this sequence of keywords.\n- Logical AND Operator: This\ + \ operator allows you to make sure that several keywords are all used\ + \ in the article\n search. By default the space character acts as an\ + \ AND operator, it is possible to replace the space character \n by AND\ + \ to obtain the same result. For example the query: Apple Microsoft is\ + \ equivalent to Apple AND Microsoft\n- Logical OR Operator: This operator\ + \ allows you to retrieve articles matching the keyword a or the keyword\ + \ b.\n It is important to note that this operator has a higher precedence\ + \ than the AND operator. For example the \n query: Apple OR Microsoft\ + \ will return all articles matching the keyword Apple as well as all articles\ + \ matching \n the keyword Microsoft\n- Logical NOT Operator: This operator\ + \ allows you to remove from the results the articles corresponding to\ + \ the\n specified keywords. 
To use it, you need to add NOT in front of\ + \ each word or phrase surrounded by quotes.\n For example the query:\ + \ Apple NOT iPhone will return all articles matching the keyword Apple\ + \ but not the keyword\n iPhone" + examples: + - "Microsoft Windows 10" + - "Apple OR Microsoft" + - "Apple AND NOT iPhone" + - "(Windows 7) AND (Windows 10)" + - "Intel AND (i7 OR i9)" + top_headlines_topic: + type: "string" + title: "Top Headlines Topic" + description: "This parameter allows you to change the category for the request." + order: 10 + enum: + - "breaking-news" + - "world" + - "nation" + - "business" + - "technology" + - "entertainment" + - "sports" + - "science" + - "health" + source-google-ads: + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + type: "object" + description: "" + title: "Google Credentials" + order: 0 + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + order: 3 + description: + "The token used to obtain a new Access Token. 
For detailed\
+ \ instructions on finding this value, refer to our documentation."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ access_token:
+ type: "string"
+ title: "Access Token"
+ order: 4
+ description:
+ "The Access Token for making authenticated requests. For\
+ \ detailed instructions on finding this value, refer to our documentation."
+ airbyte_secret: true
+ x-speakeasy-param-sensitive: true
+ customer_id:
+ title: "Customer ID(s)"
+ type: "string"
+ description:
+ "Comma-separated list of (client) customer IDs. Each customer\
+ \ ID must be specified as a 10-digit number without dashes. For detailed\
+ \ instructions on finding this value, refer to our documentation."
+ pattern: "^[0-9]{10}(,[0-9]{10})*$"
+ pattern_descriptor:
+ "The customer ID must be 10 digits. Separate multiple\
+ \ customer IDs using commas."
+ examples:
+ - "6783948572,5839201945"
+ order: 1
+ customer_status_filter:
+ title: "Customer Statuses Filter"
+ description:
+ "A list of customer statuses to filter on. For detailed info\
+ \ about what each status means refer to Google Ads documentation."
+ default: []
+ order: 2
+ type: "array"
+ items:
+ title: "CustomerStatus"
+ description: "An enumeration."
+ enum:
+ - "UNKNOWN"
+ - "ENABLED"
+ - "CANCELED"
+ - "SUSPENDED"
+ - "CLOSED"
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description:
+ "UTC date in the format YYYY-MM-DD. Any data before this date\
+ \ will not be replicated. (Default value of two years ago is used if not\
+ \ set)"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ pattern_descriptor: "YYYY-MM-DD"
+ examples:
+ - "2017-01-25"
+ order: 3
+ format: "date"
+ end_date:
+ type: "string"
+ title: "End Date"
+ description:
+ "UTC date in the format YYYY-MM-DD. Any data after this date\
+ \ will not be replicated. 
(Default value of today is used if not set)" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2017-01-30" + order: 4 + format: "date" + custom_queries_array: + type: "array" + title: "Custom GAQL Queries" + description: "" + order: 5 + items: + type: "object" + required: + - "query" + - "table_name" + properties: + query: + type: "string" + multiline: true + title: "Custom Query" + description: + "A custom defined GAQL query for building the report.\ + \ Avoid including the segments.date field; wherever possible, Airbyte\ + \ will automatically include it for incremental syncs. For more\ + \ information, refer to Google's documentation." + examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination Table Name" + description: + "The table name in your destination database for the\ + \ chosen query." + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: + "A conversion window is the number of days after an ad interaction\ + \ (such as an ad click or video view) during which a conversion, such\ + \ as a purchase, is recorded in Google Ads. For more information, see\ + \ Google's documentation." 
+ minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + order: 6 + sourceType: + title: "google-ads" + const: "google-ads" + enum: + - "google-ads" + order: 0 + type: "string" + source-google-ads-update: + title: "Google Ads Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + type: "object" + description: "" + title: "Google Credentials" + order: 0 + required: + - "developer_token" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + order: 3 + description: + "The token used to obtain a new Access Token. For detailed\ + \ instructions on finding this value, refer to our documentation." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + order: 4 + description: + "The Access Token for making authenticated requests. For\ + \ detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + customer_id: + title: "Customer ID(s)" + type: "string" + description: + "Comma-separated list of (client) customer IDs. Each customer\ + \ ID must be specified as a 10-digit number without dashes. For detailed\ + \ instructions on finding this value, refer to our documentation." 
pattern: "^[0-9]{10}(,[0-9]{10})*$"
+ pattern_descriptor:
+ "The customer ID must be 10 digits. Separate multiple\
+ \ customer IDs using commas."
+ examples:
+ - "6783948572,5839201945"
+ order: 1
+ customer_status_filter:
+ title: "Customer Statuses Filter"
+ description:
+ "A list of customer statuses to filter on. For detailed info\
+ \ about what each status means refer to Google Ads documentation."
+ default: []
+ order: 2
+ type: "array"
+ items:
+ title: "CustomerStatus"
+ description: "An enumeration."
+ enum:
+ - "UNKNOWN"
+ - "ENABLED"
+ - "CANCELED"
+ - "SUSPENDED"
+ - "CLOSED"
+ start_date:
+ type: "string"
+ title: "Start Date"
+ description:
+ "UTC date in the format YYYY-MM-DD. Any data before this date\
+ \ will not be replicated. (Default value of two years ago is used if not\
+ \ set)"
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ pattern_descriptor: "YYYY-MM-DD"
+ examples:
+ - "2017-01-25"
+ order: 3
+ format: "date"
+ end_date:
+ type: "string"
+ title: "End Date"
+ description:
+ "UTC date in the format YYYY-MM-DD. Any data after this date\
+ \ will not be replicated. (Default value of today is used if not set)"
+ pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$"
+ pattern_descriptor: "YYYY-MM-DD"
+ examples:
+ - "2017-01-30"
+ order: 4
+ format: "date"
+ custom_queries_array:
+ type: "array"
+ title: "Custom GAQL Queries"
+ description: ""
+ order: 5
+ items:
+ type: "object"
+ required:
+ - "query"
+ - "table_name"
+ properties:
+ query:
+ type: "string"
+ multiline: true
+ title: "Custom Query"
+ description:
+ "A custom defined GAQL query for building the report.\
+ \ Avoid including the segments.date field; wherever possible, Airbyte\
+ \ will automatically include it for incremental syncs. For more\
+ \ information, refer to Google's documentation."
+ examples: + - "SELECT segments.ad_destination_type, campaign.advertising_channel_sub_type\ + \ FROM campaign WHERE campaign.status = 'PAUSED'" + table_name: + type: "string" + title: "Destination Table Name" + description: + "The table name in your destination database for the\ + \ chosen query." + conversion_window_days: + title: "Conversion Window" + type: "integer" + description: + "A conversion window is the number of days after an ad interaction\ + \ (such as an ad click or video view) during which a conversion, such\ + \ as a purchase, is recorded in Google Ads. For more information, see\ + \ Google's documentation." + minimum: 0 + maximum: 1095 + default: 14 + examples: + - 14 + order: 6 + source-google-search-console: + title: "Google Search Console Spec" + type: "object" + required: + - "site_urls" + - "authorization" + - "sourceType" + properties: + site_urls: + type: "array" + items: + type: "string" + title: "Website URL Property" + description: + "The URLs of the website property attached to your GSC account.\ + \ Learn more about properties here." + examples: + - "https://example1.com/" + - "sc-domain:example2.com" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true + order: 1 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." 
+ examples: + - "2021-12-12" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + order: 2 + format: "date" + authorization: + type: "object" + title: "Authentication Type" + description: "" + order: 3 + oneOf: + - title: "OAuth" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "Access token for making authenticated requests. Read\ + \ more here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "The token for obtaining a new access token. Read more\ + \ here." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + title: "Admin Email" + type: "string" + description: + "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: + "A list of available dimensions. Please note, that for\ + \ technical reasons `date` is the default dimension which will be\ + \ included in your query whether you specify it or not. Primary\ + \ key will consist of your custom dimensions and the default dimension\ + \ along with `site_url` and `search_type`." + type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + default: + - "date" + minItems: 0 + required: + - "name" + - "dimensions" + data_state: + type: "string" + title: "Data Freshness" + enum: + - "final" + - "all" + description: + "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." 
+ examples: + - "final" + - "all" + default: "final" + order: 6 + sourceType: + title: "google-search-console" + const: "google-search-console" + enum: + - "google-search-console" + order: 0 + type: "string" + source-google-search-console-update: + title: "Google Search Console Spec" + type: "object" + required: + - "site_urls" + - "authorization" + properties: + site_urls: + type: "array" + items: + type: "string" + title: "Website URL Property" + description: + "The URLs of the website property attached to your GSC account.\ + \ Learn more about properties here." + examples: + - "https://example1.com/" + - "sc-domain:example2.com" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true + order: 1 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." + examples: + - "2021-12-12" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + order: 2 + format: "date" + authorization: + type: "object" + title: "Authentication Type" + description: "" + order: 3 + oneOf: + - title: "OAuth" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." 
+ airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "Access token for making authenticated requests. Read\ + \ more here." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "The token for obtaining a new access token. Read more\ + \ here." + airbyte_secret: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "auth_type" + - "service_account_info" + - "email" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + service_account_info: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + airbyte_secret: true + email: + title: "Admin Email" + type: "string" + description: + "The email of the user which has permissions to access\ + \ the Google Workspace Admin APIs." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: + "A list of available dimensions. Please note, that for\ + \ technical reasons `date` is the default dimension which will be\ + \ included in your query whether you specify it or not. Primary\ + \ key will consist of your custom dimensions and the default dimension\ + \ along with `site_url` and `search_type`." 
+ type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + default: + - "date" + minItems: 0 + required: + - "name" + - "dimensions" + data_state: + type: "string" + title: "Data Freshness" + enum: + - "final" + - "all" + description: + "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." + examples: + - "final" + - "all" + default: "final" + order: 6 + source-kyve: + title: "KYVE Spec" + type: "object" + required: + - "pool_ids" + - "start_ids" + - "url_base" + - "sourceType" + properties: + pool_ids: + type: "string" + title: "Pool-IDs" + description: + "The IDs of the KYVE storage pool you want to archive. (Comma\ + \ separated)" + order: 0 + examples: + - "0" + - "0,1" + start_ids: + type: "string" + title: "Bundle-Start-IDs" + description: + "The start-id defines, from which bundle id the pipeline should\ + \ start to extract the data. (Comma separated)" + order: 1 + examples: + - "0" + - "0,0" + url_base: + type: "string" + title: "KYVE-API URL Base" + description: "URL to the KYVE Chain API." + default: "https://api.kyve.network" + order: 2 + examples: + - "https://api.kaon.kyve.network/" + - "https://api.korellia.kyve.network/" + sourceType: + title: "kyve" + const: "kyve" + enum: + - "kyve" + order: 0 + type: "string" + source-kyve-update: + title: "KYVE Spec" + type: "object" + required: + - "pool_ids" + - "start_ids" + - "url_base" + properties: + pool_ids: + type: "string" + title: "Pool-IDs" + description: + "The IDs of the KYVE storage pool you want to archive. 
(Comma\ + \ separated)" + order: 0 + examples: + - "0" + - "0,1" + start_ids: + type: "string" + title: "Bundle-Start-IDs" + description: + "The start-id defines, from which bundle id the pipeline should\ + \ start to extract the data. (Comma separated)" + order: 1 + examples: + - "0" + - "0,0" + url_base: + type: "string" + title: "KYVE-API URL Base" + description: "URL to the KYVE Chain API." + default: "https://api.kyve.network" + order: 2 + examples: + - "https://api.kaon.kyve.network/" + - "https://api.korellia.kyve.network/" + source-strava: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "athlete_id" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + description: "The Client ID of your Strava developer application." + title: "Client ID" + pattern: "^[0-9_\\-]+$" + examples: + - "12345" + order: 0 + client_secret: + type: "string" + description: "The Client Secret of your Strava developer application." + title: "Client Secret" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + description: "The Refresh Token with the activity: read_all permissions." + title: "Refresh Token" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + athlete_id: + type: "integer" + description: "The Athlete ID of your Strava developer application." + title: "Athlete ID" + pattern: "^[0-9_\\-]+$" + examples: + - "17831421" + order: 3 + start_date: + type: "string" + description: "UTC date and time. Any data before this date will not be replicated." 
+ title: "Start Date" + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 4 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 5 + sourceType: + title: "strava" + const: "strava" + enum: + - "strava" + order: 0 + type: "string" + source-strava-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "athlete_id" + - "start_date" + properties: + client_id: + type: "string" + description: "The Client ID of your Strava developer application." + title: "Client ID" + pattern: "^[0-9_\\-]+$" + examples: + - "12345" + order: 0 + client_secret: + type: "string" + description: "The Client Secret of your Strava developer application." + title: "Client Secret" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 1 + refresh_token: + type: "string" + description: "The Refresh Token with the activity: read_all permissions." + title: "Refresh Token" + pattern: "^[0-9a-fA-F]+$" + examples: + - "fc6243f283e51f6ca989aab298b17da125496f50" + airbyte_secret: true + order: 2 + athlete_id: + type: "integer" + description: "The Athlete ID of your Strava developer application." + title: "Athlete ID" + pattern: "^[0-9_\\-]+$" + examples: + - "17831421" + order: 3 + start_date: + type: "string" + description: "UTC date and time. Any data before this date will not be replicated." + title: "Start Date" + examples: + - "2021-03-01T00:00:00Z" + format: "date-time" + order: 4 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 5 + source-smaily: + type: "object" + required: + - "api_password" + - "api_subdomain" + - "api_username" + - "sourceType" + properties: + api_password: + type: "string" + title: "API User Password" + description: "API user password. 
See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + order: 1 + api_username: + type: "string" + title: "API User Username" + description: "API user username. See https://smaily.com/help/api/general/create-api-user/" + order: 2 + sourceType: + title: "smaily" + const: "smaily" + enum: + - "smaily" + order: 0 + type: "string" + source-smaily-update: + type: "object" + required: + - "api_password" + - "api_subdomain" + - "api_username" + properties: + api_password: + type: "string" + title: "API User Password" + description: "API user password. See https://smaily.com/help/api/general/create-api-user/" + airbyte_secret: true + order: 0 + api_subdomain: + type: "string" + title: "API Subdomain" + description: "API Subdomain. See https://smaily.com/help/api/general/create-api-user/" + order: 1 + api_username: + type: "string" + title: "API User Username" + description: "API user username. 
See https://smaily.com/help/api/general/create-api-user/" + order: 2 + source-height: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + search_query: + type: "string" + description: "Search query to be used with search stream" + title: "search_query" + default: "task" + order: 2 + sourceType: + title: "height" + const: "height" + enum: + - "height" + order: 0 + type: "string" + source-height-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + search_query: + type: "string" + description: "Search query to be used with search stream" + title: "search_query" + default: "task" + order: 2 + source-piwik: + type: "object" + required: + - "client_id" + - "client_secret" + - "organization_id" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: "The organization id appearing at URL of your piwik website" + order: 2 + title: "Organization ID" + sourceType: + title: "piwik" + const: "piwik" + enum: + - "piwik" + order: 0 + type: "string" + source-piwik-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "organization_id" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + 
airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + organization_id: + type: "string" + description: "The organization id appearing at URL of your piwik website" + order: 2 + title: "Organization ID" + source-polygon-stock-api: + type: "object" + required: + - "apiKey" + - "end_date" + - "multiplier" + - "start_date" + - "stocksTicker" + - "timespan" + - "sourceType" + properties: + sort: + type: "string" + order: 5 + title: "Sort" + examples: + - "asc" + - "desc" + description: + "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + limit: + type: "integer" + order: 3 + title: "Limit" + examples: + - 100 + - 120 + description: "The target date for the aggregate window." + apiKey: + type: "string" + order: 1 + title: "API Key" + description: "Your API ACCESS Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + adjusted: + type: "string" + order: 0 + title: "Adjusted" + examples: + - "true" + - "false" + description: + "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + end_date: + type: "string" + order: 2 + title: "End Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The target date for the aggregate window." + timespan: + type: "string" + order: 8 + title: "Timespan" + examples: + - "day" + description: "The size of the time window." + multiplier: + type: "integer" + order: 4 + title: "Multiplier" + examples: + - 1 + - 2 + description: "The size of the timespan multiplier." 
+ start_date: + type: "string" + order: 6 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The beginning date for the aggregate window." + stocksTicker: + type: "string" + order: 7 + title: "Stock Ticker" + examples: + - "IBM" + - "MSFT" + description: "The exchange symbol that this item is traded under." + sourceType: + title: "polygon-stock-api" + const: "polygon-stock-api" + enum: + - "polygon-stock-api" + order: 0 + type: "string" + source-polygon-stock-api-update: + type: "object" + required: + - "apiKey" + - "end_date" + - "multiplier" + - "start_date" + - "stocksTicker" + - "timespan" + properties: + sort: + type: "string" + order: 5 + title: "Sort" + examples: + - "asc" + - "desc" + description: + "Sort the results by timestamp. asc will return results in\ + \ ascending order (oldest at the top), desc will return results in descending\ + \ order (newest at the top)." + limit: + type: "integer" + order: 3 + title: "Limit" + examples: + - 100 + - 120 + description: "The target date for the aggregate window." + apiKey: + type: "string" + order: 1 + title: "API Key" + description: "Your API ACCESS Key" + airbyte_secret: true + adjusted: + type: "string" + order: 0 + title: "Adjusted" + examples: + - "true" + - "false" + description: + "Determines whether or not the results are adjusted for splits.\ + \ By default, results are adjusted and set to true. Set this to false\ + \ to get results that are NOT adjusted for splits." + end_date: + type: "string" + order: 2 + title: "End Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The target date for the aggregate window." + timespan: + type: "string" + order: 8 + title: "Timespan" + examples: + - "day" + description: "The size of the time window." 
+ multiplier: + type: "integer" + order: 4 + title: "Multiplier" + examples: + - 1 + - 2 + description: "The size of the timespan multiplier." + start_date: + type: "string" + order: 6 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2020-10-14" + description: "The beginning date for the aggregate window." + stocksTicker: + type: "string" + order: 7 + title: "Stock Ticker" + examples: + - "IBM" + - "MSFT" + description: "The exchange symbol that this item is traded under." + source-shopify: + title: "Shopify Source CDK Specifications" + type: "object" + required: + - "shop" + - "sourceType" + properties: + shop: + type: "string" + title: "Shopify Store" + description: + "The name of your Shopify store found in the URL. For example,\ + \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ + \ or 'NAME.myshopify.com'." + pattern: "^(?!https://)(?!https://).*" + examples: + - "my-store" + - "my-store.myshopify.com" + order: 1 + credentials: + title: "Shopify Authorization Method" + description: "The authorization method to use to retrieve data from Shopify" + type: "object" + order: 2 + oneOf: + - type: "object" + title: "OAuth2.0" + description: "OAuth2.0" + required: + - "auth_method" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "The Access Token for making authenticated requests." 
+ airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: + "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Replication Start Date" + description: + "The date you would like to replicate data from. Format: YYYY-MM-DD.\ + \ Any data before this date will not be replicated." + default: "2020-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 3 + bulk_window_in_days: + type: "integer" + title: "GraphQL BULK Date Range in Days" + description: "Defines what would be a date range per single BULK Job" + default: 30 + fetch_transactions_user_id: + type: "boolean" + title: "Add `user_id` to Transactions (slower)" + description: + "Defines which API type (REST/BULK) to use to fetch `Transactions`\ + \ data. If you are a `Shopify Plus` user, leave the default value to speed\ + \ up the fetch." + default: false + job_product_variants_include_pres_prices: + type: "boolean" + title: "Add `Presentment prices` to Product Variants" + description: + "If enabled, the `Product Variants` stream attempts to include\ + \ `Presentment prices` field (may affect the performance)." + default: true + job_termination_threshold: + type: "integer" + title: "BULK Job termination threshold" + description: + "The max time in seconds, after which the single BULK Job should\ + \ be `CANCELED` and retried. The bigger the value the longer the BULK\ + \ Job is allowed to run." 
+ default: 7200 + minimum: 3600 + maximum: 21600 + job_checkpoint_interval: + type: "integer" + title: "BULK Job checkpoint (rows collected)" + description: "The threshold, after which the single BULK Job should be checkpointed." + default: 100000 + minimum: 15000 + maximum: 200000 + sourceType: + title: "shopify" + const: "shopify" + enum: + - "shopify" + order: 0 + type: "string" + source-shopify-update: + title: "Shopify Source CDK Specifications" + type: "object" + required: + - "shop" + properties: + shop: + type: "string" + title: "Shopify Store" + description: + "The name of your Shopify store found in the URL. For example,\ + \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ + \ or 'NAME.myshopify.com'." + pattern: "^(?!https://)(?!https://).*" + examples: + - "my-store" + - "my-store.myshopify.com" + order: 1 + credentials: + title: "Shopify Authorization Method" + description: "The authorization method to use to retrieve data from Shopify" + type: "object" + order: 2 + oneOf: + - type: "object" + title: "OAuth2.0" + description: "OAuth2.0" + required: + - "auth_method" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "The Access Token for making authenticated requests." 
+ airbyte_secret: true + order: 3 + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: + "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Replication Start Date" + description: + "The date you would like to replicate data from. Format: YYYY-MM-DD.\ + \ Any data before this date will not be replicated." + default: "2020-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + order: 3 + bulk_window_in_days: + type: "integer" + title: "GraphQL BULK Date Range in Days" + description: "Defines what would be a date range per single BULK Job" + default: 30 + fetch_transactions_user_id: + type: "boolean" + title: "Add `user_id` to Transactions (slower)" + description: + "Defines which API type (REST/BULK) to use to fetch `Transactions`\ + \ data. If you are a `Shopify Plus` user, leave the default value to speed\ + \ up the fetch." + default: false + job_product_variants_include_pres_prices: + type: "boolean" + title: "Add `Presentment prices` to Product Variants" + description: + "If enabled, the `Product Variants` stream attempts to include\ + \ `Presentment prices` field (may affect the performance)." + default: true + job_termination_threshold: + type: "integer" + title: "BULK Job termination threshold" + description: + "The max time in seconds, after which the single BULK Job should\ + \ be `CANCELED` and retried. The bigger the value the longer the BULK\ + \ Job is allowed to run." 
+ default: 7200 + minimum: 3600 + maximum: 21600 + job_checkpoint_interval: + type: "integer" + title: "BULK Job checkpoint (rows collected)" + description: "The threshold, after which the single BULK Job should be checkpointed." + default: 100000 + minimum: 15000 + maximum: 200000 + source-omnisend: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "omnisend" + const: "omnisend" + enum: + - "omnisend" + order: 0 + type: "string" + source-omnisend-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API Key" + order: 0 + source-mongodb-v2: + title: "MongoDb Source Spec" + type: "object" + required: + - "database_config" + - "sourceType" + properties: + database_config: + type: "object" + title: "Cluster Type" + description: "Configures the MongoDB cluster type." + order: 1 + group: "connection" + display_type: "radio" + oneOf: + - title: "MongoDB Atlas Replica Set" + description: "MongoDB Atlas-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + - "username" + - "password" + - "auth_source" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "ATLAS_REPLICA_SET" + order: 1 + enum: + - "ATLAS_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate." + examples: + - "mongodb+srv://cluster0.abcd1.mongodb.net/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." 
+ order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\ + \ for more details." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + - title: "Self-Managed Replica Set" + description: "MongoDB self-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "SELF_MANAGED_REPLICA_SET" + order: 1 + enum: + - "SELF_MANAGED_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\ + \ for more information." + examples: + - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/" + - "mongodb://example.host.com:27017/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." 
+ order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to 300 seconds.\ + \ Valid range: 120 seconds to 1200 seconds." + default: 300 + order: 8 + min: 120 + max: 1200 + group: "advanced" + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with memory\ + \ consumption and efficiency of the connector, please be careful." + default: 10000 + order: 9 + min: 1000 + max: 10000 + group: "advanced" + discover_sample_size: + type: "integer" + title: "Document discovery sample size (Advanced)" + description: + "The maximum number of documents to sample when attempting\ + \ to discover the unique fields for a collection." + default: 10000 + order: 10 + minimum: 10 + maximum: 100000 + group: "advanced" + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data in\ + \ case of an stale/invalid cursor value into the WAL. If 'Fail sync' is\ + \ chosen, a user will have to manually reset the connection before being\ + \ able to continue syncing data. 
If 'Re-sync data' is chosen, Airbyte\ + \ will automatically trigger a refresh but could lead to higher cloud\ + \ costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 11 + group: "advanced" + update_capture_mode: + type: "string" + title: "Capture mode (Advanced)" + description: + "Determines how Airbyte looks up the value of an updated document.\ + \ If 'Lookup' is chosen, the current value of the document will be read.\ + \ If 'Post Image' is chosen, then the version of the document immediately\ + \ after an update will be read. WARNING : Severe data loss will occur\ + \ if this option is chosen and the appropriate settings are not set on\ + \ your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images." + enum: + - "Lookup" + - "Post Image" + default: "Lookup" + order: 12 + group: "advanced" + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 13 + group: "advanced" + sourceType: + title: "mongodb-v2" + const: "mongodb-v2" + enum: + - "mongodb-v2" + order: 0 + type: "string" + groups: + - id: "connection" + - id: "advanced" + title: "Advanced" + source-mongodb-v2-update: + title: "MongoDb Source Spec" + type: "object" + required: + - "database_config" + properties: + database_config: + type: "object" + title: "Cluster Type" + description: "Configures the MongoDB cluster type." 
+ order: 1 + group: "connection" + display_type: "radio" + oneOf: + - title: "MongoDB Atlas Replica Set" + description: "MongoDB Atlas-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + - "username" + - "password" + - "auth_source" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "ATLAS_REPLICA_SET" + order: 1 + enum: + - "ATLAS_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate." + examples: + - "mongodb+srv://cluster0.abcd1.mongodb.net/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored. See https://www.mongodb.com/docs/manual/reference/connection-string/#mongodb-urioption-urioption.authSource\ + \ for more details." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." 
+ default: true + type: "boolean" + always_show: true + order: 7 + - title: "Self-Managed Replica Set" + description: "MongoDB self-hosted cluster configured as a replica set" + required: + - "cluster_type" + - "connection_string" + - "database" + additionalProperties: true + properties: + cluster_type: + type: "string" + const: "SELF_MANAGED_REPLICA_SET" + order: 1 + enum: + - "SELF_MANAGED_REPLICA_SET" + connection_string: + title: "Connection String" + type: "string" + description: + "The connection string of the cluster that you want to\ + \ replicate. https://www.mongodb.com/docs/manual/reference/connection-string/#find-your-self-hosted-deployment-s-connection-string\ + \ for more information." + examples: + - "mongodb://example1.host.com:27017,example2.host.com:27017,example3.host.com:27017/" + - "mongodb://example.host.com:27017/" + order: 2 + database: + title: "Database Name" + type: "string" + description: + "The name of the MongoDB database that contains the collection(s)\ + \ to replicate." + order: 3 + username: + title: "Username" + type: "string" + description: "The username which is used to access the database." + order: 4 + password: + title: "Password" + type: "string" + description: "The password associated with this username." + airbyte_secret: true + order: 5 + auth_source: + title: "Authentication Source" + type: "string" + description: + "The authentication source where the user information\ + \ is stored." + default: "admin" + examples: + - "admin" + order: 6 + schema_enforced: + title: "Schema Enforced" + description: + "When enabled, syncs will validate and structure records\ + \ against the stream's schema." + default: true + type: "boolean" + always_show: true + order: 7 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to 300 seconds.\ + \ Valid range: 120 seconds to 1200 seconds." + default: 300 + order: 8 + min: 120 + max: 1200 + group: "advanced" + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with memory\ + \ consumption and efficiency of the connector, please be careful." + default: 10000 + order: 9 + min: 1000 + max: 10000 + group: "advanced" + discover_sample_size: + type: "integer" + title: "Document discovery sample size (Advanced)" + description: + "The maximum number of documents to sample when attempting\ + \ to discover the unique fields for a collection." + default: 10000 + order: 10 + minimum: 10 + maximum: 100000 + group: "advanced" + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data in\ + \ case of an stale/invalid cursor value into the WAL. If 'Fail sync' is\ + \ chosen, a user will have to manually reset the connection before being\ + \ able to continue syncing data. If 'Re-sync data' is chosen, Airbyte\ + \ will automatically trigger a refresh but could lead to higher cloud\ + \ costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 11 + group: "advanced" + update_capture_mode: + type: "string" + title: "Capture mode (Advanced)" + description: + "Determines how Airbyte looks up the value of an updated document.\ + \ If 'Lookup' is chosen, the current value of the document will be read.\ + \ If 'Post Image' is chosen, then the version of the document immediately\ + \ after an update will be read. WARNING : Severe data loss will occur\ + \ if this option is chosen and the appropriate settings are not set on\ + \ your Mongo instance : https://www.mongodb.com/docs/manual/changeStreams/#change-streams-with-document-pre-and-post-images." 
+ enum: + - "Lookup" + - "Post Image" + default: "Lookup" + order: 12 + group: "advanced" + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 13 + group: "advanced" + groups: + - id: "connection" + - id: "advanced" + title: "Advanced" + source-retently: + title: "Retently Api Spec" + type: "object" + properties: + credentials: + title: "Authentication Mechanism" + description: "Choose how to authenticate to Retently" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Retently (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Retently Refresh Token which can be used to fetch new\ + \ Bearer Tokens when the current one expires." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with API Token" + required: + - "api_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_key: + title: "API Token" + description: + "Retently API Token. See the docs for more information on how to obtain this key." 
+ type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "retently" + const: "retently" + enum: + - "retently" + order: 0 + type: "string" + source-retently-update: + title: "Retently Api Spec" + type: "object" + properties: + credentials: + title: "Authentication Mechanism" + description: "Choose how to authenticate to Retently" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Retently (OAuth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Retently Refresh Token which can be used to fetch new\ + \ Bearer Tokens when the current one expires." + airbyte_secret: true + - type: "object" + title: "Authenticate with API Token" + required: + - "api_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Token" + order: 0 + enum: + - "Token" + api_key: + title: "API Token" + description: + "Retently API Token. See the docs for more information on how to obtain this key." 
+ type: "string" + airbyte_secret: true + source-coda: + type: "object" + required: + - "auth_token" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication token" + description: "Bearer token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "coda" + const: "coda" + enum: + - "coda" + order: 0 + type: "string" + source-coda-update: + type: "object" + required: + - "auth_token" + properties: + auth_token: + type: "string" + title: "Authentication token" + description: "Bearer token" + airbyte_secret: true + order: 0 + source-fleetio: + type: "object" + required: + - "api_key" + - "account_token" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_token: + type: "string" + order: 1 + title: "account_token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "fleetio" + const: "fleetio" + enum: + - "fleetio" + order: 0 + type: "string" + source-fleetio-update: + type: "object" + required: + - "api_key" + - "account_token" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + airbyte_secret: true + account_token: + type: "string" + order: 1 + title: "account_token" + airbyte_secret: true + source-pendo: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "pendo" + const: "pendo" + enum: + - "pendo" + order: 0 + type: "string" + source-pendo-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + source-jotform: + type: "object" + required: + - "api_key" + - "api_endpoint" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + 
airbyte_secret: true + x-speakeasy-param-sensitive: true + end_date: + type: "string" + order: 3 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_endpoint: + type: "object" + oneOf: + - type: "object" + title: "Basic" + required: + - "url_prefix" + properties: + url_prefix: + type: "string" + description: + "You can access our API through the following URLs -\ + \ Standard API Usage (Use the default API URL - https://api.jotform.com),\ + \ For EU (Use the EU API URL - https://eu-api.jotform.com), For\ + \ HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)" + enum: + - "Standard" + - "EU" + - "HIPAA" + title: "Base URL Prefix" + default: "Standard" + api_endpoint: + type: "string" + const: "basic" + order: 0 + enum: + - "basic" + - type: "object" + title: "Enterprise" + required: + - "enterprise_url" + properties: + api_endpoint: + type: "string" + const: "enterprise" + order: 0 + enum: + - "enterprise" + enterprise_url: + type: "string" + description: + "Upgrade to Enterprise to make your API url your-domain.com/API\ + \ or subdomain.jotform.com/API instead of api.jotform.com" + title: "Enterprise URL" + order: 1 + title: "API Endpoint" + sourceType: + title: "jotform" + const: "jotform" + enum: + - "jotform" + order: 0 + type: "string" + source-jotform-update: + type: "object" + required: + - "api_key" + - "api_endpoint" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + end_date: + type: "string" + order: 3 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: 
"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + api_endpoint: + type: "object" + oneOf: + - type: "object" + title: "Basic" + required: + - "url_prefix" + properties: + url_prefix: + type: "string" + description: + "You can access our API through the following URLs -\ + \ Standard API Usage (Use the default API URL - https://api.jotform.com),\ + \ For EU (Use the EU API URL - https://eu-api.jotform.com), For\ + \ HIPAA (Use the HIPAA API URL - https://hipaa-api.jotform.com)" + enum: + - "Standard" + - "EU" + - "HIPAA" + title: "Base URL Prefix" + default: "Standard" + api_endpoint: + type: "string" + const: "basic" + order: 0 + enum: + - "basic" + - type: "object" + title: "Enterprise" + required: + - "enterprise_url" + properties: + api_endpoint: + type: "string" + const: "enterprise" + order: 0 + enum: + - "enterprise" + enterprise_url: + type: "string" + description: + "Upgrade to Enterprise to make your API url your-domain.com/API\ + \ or subdomain.jotform.com/API instead of api.jotform.com" + title: "Enterprise URL" + order: 1 + title: "API Endpoint" + source-instagram: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated. If left blank, the start date will be\ + \ set to 2 years before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: + "The value of the access token generated with instagram_basic,\ + \ instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram\ + \ Public Content Access permissions. 
See the docs for more information" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + sourceType: + title: "instagram" + const: "instagram" + enum: + - "instagram" + order: 0 + type: "string" + required: + - "access_token" + - "sourceType" + source-instagram-update: + title: "Source Instagram" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for User\ + \ Insights, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated. If left blank, the start date will be\ + \ set to 2 years before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + access_token: + title: "Access Token" + description: + "The value of the access token generated with instagram_basic,\ + \ instagram_manage_insights, pages_show_list, pages_read_engagement, Instagram\ + \ Public Content Access permissions. 
See the docs for more information" + airbyte_secret: true + type: "string" + required: + - "access_token" + source-dbt: + type: "object" + required: + - "api_key_2" + - "account_id" + - "sourceType" + properties: + api_key_2: + type: "string" + order: 0 + title: "Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + order: 1 + title: "account_id" + sourceType: + title: "dbt" + const: "dbt" + enum: + - "dbt" + order: 0 + type: "string" + source-dbt-update: + type: "object" + required: + - "api_key_2" + - "account_id" + properties: + api_key_2: + type: "string" + order: 0 + title: "Token" + airbyte_secret: true + account_id: + type: "string" + order: 1 + title: "account_id" + source-nylas: + type: "object" + required: + - "api_key" + - "api_server" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + api_server: + type: "string" + enum: + - "us" + - "eu" + order: 1 + title: "API Server" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "nylas" + const: "nylas" + enum: + - "nylas" + order: 0 + type: "string" + source-nylas-update: + type: "object" + required: + - "api_key" + - "api_server" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + api_server: + type: "string" + enum: + - "us" + - "eu" + order: 1 + title: "API Server" + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + end_date: + type: "string" + title: "End date" + format: 
"date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + source-s3: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." 
+ default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." 
+ default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + role_arn: + title: "AWS Role ARN" + description: + "Specifies the Amazon Resource Name (ARN) of an IAM role that\ + \ you want to use to perform operations requested using this profile.\ + \ Set the External ID to the Airbyte workspace ID, which can be found\ + \ in the URL of this page." + order: 6 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + examples: + - "my-s3-endpoint.com" + - "https://my-s3-endpoint.com" + order: 4 + type: "string" + region_name: + title: "AWS Region" + description: + "AWS region where the S3 bucket is located. If not provided,\ + \ the region will be determined automatically." + order: 5 + type: "string" + sourceType: + title: "s3" + const: "s3" + enum: + - "s3" + order: 0 + type: "string" + required: + - "streams" + - "bucket" + - "sourceType" + source-s3-update: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + order: 2 + type: "string" + role_arn: + title: "AWS Role ARN" + description: + "Specifies the Amazon Resource Name (ARN) of an IAM role that\ + \ you want to use to perform operations requested using this profile.\ + \ Set the External ID to the Airbyte workspace ID, which can be found\ + \ in the URL of this page." + order: 6 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + examples: + - "my-s3-endpoint.com" + - "https://my-s3-endpoint.com" + order: 4 + type: "string" + region_name: + title: "AWS Region" + description: + "AWS region where the S3 bucket is located. If not provided,\ + \ the region will be determined automatically." + order: 5 + type: "string" + required: + - "streams" + - "bucket" + source-azure-blob-storage: + title: "SourceAzureBlobStorageSpec" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\ + \ File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." 
+ default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Azure Blob Storage" + type: "object" + order: 2 + oneOf: + - title: "Authenticate via Oauth2" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2" + const: "oauth2" + enum: + - "oauth2" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft Azure Application user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Authenticate via Storage Account Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "storage_account_key" + const: "storage_account_key" + enum: + - "storage_account_key" + type: "string" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "azure_blob_storage_account_key" + - "auth_type" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + description: "The account's name of the Azure Blob Storage." 
+ examples: + - "airbyte5storage" + order: 3 + type: "string" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + description: "The name of the Azure blob storage container." + examples: + - "airbytetescontainername" + order: 4 + type: "string" + azure_blob_storage_endpoint: + title: "Endpoint Domain Name" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + order: 11 + type: "string" + sourceType: + title: "azure-blob-storage" + const: "azure-blob-storage" + enum: + - "azure-blob-storage" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "azure_blob_storage_account_name" + - "azure_blob_storage_container_name" + - "sourceType" + source-azure-blob-storage-update: + title: "SourceAzureBlobStorageSpec" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy Azure Blob Storage v0 configs into v1 configs using the\ + \ File-Based CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. 
When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. 
This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." 
+ default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + 
true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." 
+ default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. `hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." 
+ default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Azure Blob Storage" + type: "object" + order: 2 + oneOf: + - title: "Authenticate via Oauth2" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "oauth2" + const: "oauth2" + enum: + - "oauth2" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft Azure Application user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Authenticate via Storage Account Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "storage_account_key" + const: "storage_account_key" + enum: + - "storage_account_key" + type: "string" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + order: 3 + type: "string" + required: + - "azure_blob_storage_account_key" + - "auth_type" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + description: "The account's name of the Azure Blob Storage." 
+ examples: + - "airbyte5storage" + order: 3 + type: "string" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + description: "The name of the Azure blob storage container." + examples: + - "airbytetescontainername" + order: 4 + type: "string" + azure_blob_storage_endpoint: + title: "Endpoint Domain Name" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + order: 11 + type: "string" + required: + - "streams" + - "credentials" + - "azure_blob_storage_account_name" + - "azure_blob_storage_container_name" + source-close-com: + title: "Close.com Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Replication Start Date" + type: "string" + description: + "The start date to sync data; all data after this date will\ + \ be replicated. Leave blank to retrieve all the data available in the\ + \ account. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + sourceType: + title: "close-com" + const: "close-com" + enum: + - "close-com" + order: 0 + type: "string" + source-close-com-update: + title: "Close.com Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Close.com API key (usually starts with 'api_'; find yours\ + \ here)." + airbyte_secret: true + start_date: + title: "Replication Start Date" + type: "string" + description: + "The start date to sync data; all data after this date will\ + \ be replicated. 
Leave blank to retrieve all the data available in the\ + \ account. Format: YYYY-MM-DD." + examples: + - "2021-01-01" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + source-zendesk-sunshine: + type: "object" + required: + - "start_date" + - "subdomain" + - "sourceType" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: + "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + sourceType: + title: "zendesk-sunshine" + const: "zendesk-sunshine" + enum: + - "zendesk-sunshine" + order: 0 + type: "string" + source-zendesk-sunshine-update: + type: "object" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: + "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + source-canny: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "You can find your secret API key in Your Canny Subdomain >\ + \ Settings > API" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "canny" + const: "canny" + enum: + - "canny" + order: 0 + type: "string" + source-canny-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "You can find your secret API key in Your Canny Subdomain >\ + \ Settings > API" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-exchange-rates: + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + - "sourceType" + properties: + start_date: + type: "string" + description: "Start getting data from that date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + access_key: + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + base: + type: "string" + description: + "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + ignore_weekends: + type: "boolean" + description: "Ignore weekends? (Exchanges don't run on weekends)" + default: true + sourceType: + title: "exchange-rates" + const: "exchange-rates" + enum: + - "exchange-rates" + order: 0 + type: "string" + source-exchange-rates-update: + title: "exchangeratesapi.io Source Spec" + type: "object" + required: + - "start_date" + - "access_key" + properties: + start_date: + type: "string" + description: "Start getting data from that date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + access_key: + type: "string" + description: + "Your API Key. See here. The key is case sensitive." + airbyte_secret: true + base: + type: "string" + description: + "ISO reference currency. See here. Free plan doesn't support Source Currency Switching, default\ + \ base currency is EUR" + examples: + - "EUR" + - "USD" + ignore_weekends: + type: "boolean" + description: "Ignore weekends? (Exchanges don't run on weekends)" + default: true + source-woocommerce: + type: "object" + title: "Woocommerce Spec" + required: + - "api_key" + - "api_secret" + - "shop" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "Customer Key" + description: "Customer Key for API in WooCommerce shop" + airbyte_secret: true + x-speakeasy-param-sensitive: true + api_secret: + type: "string" + order: 1 + title: "Customer Secret" + description: "Customer Secret for API in WooCommerce shop" + airbyte_secret: true + x-speakeasy-param-sensitive: true + shop: + type: "string" + order: 2 + title: "Shop Name" + description: + "The name of the store. For https://EXAMPLE.com, the shop name\ + \ is 'EXAMPLE.com'." + start_date: + type: "string" + order: 3 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2021-01-01" + description: "The date you would like to replicate data from. 
Format: YYYY-MM-DD" + sourceType: + title: "woocommerce" + const: "woocommerce" + enum: + - "woocommerce" + order: 0 + type: "string" + source-woocommerce-update: + type: "object" + title: "Woocommerce Spec" + required: + - "api_key" + - "api_secret" + - "shop" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "Customer Key" + description: "Customer Key for API in WooCommerce shop" + airbyte_secret: true + api_secret: + type: "string" + order: 1 + title: "Customer Secret" + description: "Customer Secret for API in WooCommerce shop" + airbyte_secret: true + shop: + type: "string" + order: 2 + title: "Shop Name" + description: + "The name of the store. For https://EXAMPLE.com, the shop name\ + \ is 'EXAMPLE.com'." + start_date: + type: "string" + order: 3 + title: "Start Date" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2021-01-01" + description: "The date you would like to replicate data from. Format: YYYY-MM-DD" + source-linkedin-pages: + type: "object" + required: + - "org_id" + - "sourceType" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the LinkedIn developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_secret: + type: "string" + title: "Client secret" + description: "The client secret of the LinkedIn developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access token" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + title: "Authentication" + org_id: + type: "string" + order: 1 + title: "Organization ID" + examples: + - "123456789" + description: "Specify the Organization ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + default: "2023-01-01T00:00:00Z" + description: + "Start date for getting metrics per time period. Must be atmost\ + \ 12 months before the request date (UTC) and atleast 2 days prior to\ + \ the request date (UTC). See https://bit.ly/linkedin-pages-date-rules\ + \ {{ \"\\n\" }} {{ response.errorDetails }}" + time_granularity_type: + enum: + - "DAY" + - "MONTH" + type: "string" + order: 3 + title: "Time Granularity Type" + default: "DAY" + description: + "Granularity of the statistics for metrics per time period.\ + \ Must be either \"DAY\" or \"MONTH\"" + sourceType: + title: "linkedin-pages" + const: "linkedin-pages" + enum: + - "linkedin-pages" + order: 0 + type: "string" + source-linkedin-pages-update: + type: "object" + required: + - "org_id" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the LinkedIn developer application." 
+ airbyte_secret: true + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_secret: + type: "string" + title: "Client secret" + description: "The client secret of the LinkedIn developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + - type: "object" + title: "Access token" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access token" + description: + "The token value generated using the LinkedIn Developers\ + \ OAuth Token Tools. See the docs to obtain yours." + airbyte_secret: true + order: 0 + title: "Authentication" + org_id: + type: "string" + order: 1 + title: "Organization ID" + examples: + - "123456789" + description: "Specify the Organization ID" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + default: "2023-01-01T00:00:00Z" + description: + "Start date for getting metrics per time period. Must be atmost\ + \ 12 months before the request date (UTC) and atleast 2 days prior to\ + \ the request date (UTC). 
See https://bit.ly/linkedin-pages-date-rules\ + \ {{ \"\\n\" }} {{ response.errorDetails }}" + time_granularity_type: + enum: + - "DAY" + - "MONTH" + type: "string" + order: 3 + title: "Time Granularity Type" + default: "DAY" + description: + "Granularity of the statistics for metrics per time period.\ + \ Must be either \"DAY\" or \"MONTH\"" + source-planhat: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Your Planhat API Access Token" + order: 0 + title: "API Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "planhat" + const: "planhat" + enum: + - "planhat" + order: 0 + type: "string" + source-planhat-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Your Planhat API Access Token" + order: 0 + title: "API Token" + airbyte_secret: true + source-whisky-hunter: + type: "object" + required: + - "sourceType" + properties: + sourceType: + title: "whisky-hunter" + const: "whisky-hunter" + enum: + - "whisky-hunter" + order: 0 + type: "string" + source-whisky-hunter-update: + type: "object" + required: [] + properties: {} + source-tvmaze-schedule: + type: "object" + required: + - "start_date" + - "domestic_schedule_country_code" + - "sourceType" + properties: + start_date: + type: "string" + description: "Start date for TV schedule retrieval. May be in the future." + order: 0 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: + "End date for TV schedule retrieval. May be in the future.\ + \ Optional.\n" + order: 1 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + domestic_schedule_country_code: + type: "string" + description: "Country code for domestic TV schedule retrieval." + examples: + - "US" + - "GB" + order: 2 + web_schedule_country_code: + type: "string" + description: + "ISO 3166-1 country code for web TV schedule retrieval. 
Leave\ + \ blank for\nall countries plus global web channels (e.g. Netflix). Alternatively,\n\ + set to 'global' for just global web channels.\n" + examples: + - "US" + - "GB" + - "global" + order: 3 + sourceType: + title: "tvmaze-schedule" + const: "tvmaze-schedule" + enum: + - "tvmaze-schedule" + order: 0 + type: "string" + source-tvmaze-schedule-update: + type: "object" + required: + - "start_date" + - "domestic_schedule_country_code" + properties: + start_date: + type: "string" + description: "Start date for TV schedule retrieval. May be in the future." + order: 0 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + end_date: + type: "string" + description: + "End date for TV schedule retrieval. May be in the future.\ + \ Optional.\n" + order: 1 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + domestic_schedule_country_code: + type: "string" + description: "Country code for domestic TV schedule retrieval." + examples: + - "US" + - "GB" + order: 2 + web_schedule_country_code: + type: "string" + description: + "ISO 3166-1 country code for web TV schedule retrieval. Leave\ + \ blank for\nall countries plus global web channels (e.g. Netflix). Alternatively,\n\ + set to 'global' for just global web channels.\n" + examples: + - "US" + - "GB" + - "global" + order: 3 + source-salesloft: + type: "object" + required: + - "credentials" + - "start_date" + - "sourceType" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "Authenticate via OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "auth_type" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Salesloft developer application." + access_token: + type: "string" + description: "Access Token for making authenticated requests." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Salesloft developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining a new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + format: "date-time" + description: "The date-time when the access token should be refreshed." + - type: "object" + title: "Authenticate via API Key" + required: + - "api_key" + - "auth_type" + properties: + api_key: + type: "string" + title: "API Key" + description: + "API Key for making authenticated requests. More instruction\ + \ on how to find this value in our docs" + airbyte_secret: true + x-speakeasy-param-sensitive: true + auth_type: + type: "string" + const: "api_key" + enum: + - "api_key" + order: 0 + title: "Credentials" + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + examples: + - "2020-11-16T00:00:00Z" + description: + "The date from which you'd like to replicate data for Salesloft\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ sourceType: + title: "salesloft" + const: "salesloft" + enum: + - "salesloft" + order: 0 + type: "string" + source-salesloft-update: + type: "object" + required: + - "credentials" + - "start_date" + properties: + credentials: + type: "object" + oneOf: + - type: "object" + title: "Authenticate via OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "auth_type" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Salesloft developer application." + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Salesloft developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining a new access token." + airbyte_secret: true + token_expiry_date: + type: "string" + format: "date-time" + description: "The date-time when the access token should be refreshed." + - type: "object" + title: "Authenticate via API Key" + required: + - "api_key" + - "auth_type" + properties: + api_key: + type: "string" + title: "API Key" + description: + "API Key for making authenticated requests. More instruction\ + \ on how to find this value in our docs" + airbyte_secret: true + auth_type: + type: "string" + const: "api_key" + enum: + - "api_key" + order: 0 + title: "Credentials" + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + examples: + - "2020-11-16T00:00:00Z" + description: + "The date from which you'd like to replicate data for Salesloft\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ source-shortio: + title: "Shortio Spec" + type: "object" + required: + - "domain_id" + - "secret_key" + - "start_date" + - "sourceType" + properties: + domain_id: + type: "string" + desciprtion: "Short.io Domain ID" + title: "Domain ID" + airbyte_secret: false + x-speakeasy-param-sensitive: true + secret_key: + type: "string" + title: "Secret Key" + description: "Short.io Secret Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-07-30T03:43:59.244Z" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "shortio" + const: "shortio" + enum: + - "shortio" + order: 0 + type: "string" + source-shortio-update: + title: "Shortio Spec" + type: "object" + required: + - "domain_id" + - "secret_key" + - "start_date" + properties: + domain_id: + type: "string" + desciprtion: "Short.io Domain ID" + title: "Domain ID" + airbyte_secret: false + secret_key: + type: "string" + title: "Secret Key" + description: "Short.io Secret Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2023-07-30T03:43:59.244Z" + airbyte_secret: false + source-instatus: + title: "Instatus Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Instatus REST API key" + x-speakeasy-param-sensitive: true + sourceType: + title: "instatus" + const: "instatus" + enum: + - "instatus" + order: 0 + type: "string" + source-instatus-update: + title: "Instatus Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Instatus REST API key" + source-yandex-metrica: + title: "Yandex Metrica Spec" + type: "object" + required: + - "auth_token" + - "counter_id" + - "start_date" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication Token" + description: "Your Yandex Metrica API access token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + counter_id: + type: "string" + title: "Counter ID" + description: "Counter ID" + pattern: "^[0-9]+$" + order: 1 + start_date: + title: "Start Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\"." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 2 + end_date: + title: "End Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\". If not provided will sync till most recent date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 3 + sourceType: + title: "yandex-metrica" + const: "yandex-metrica" + enum: + - "yandex-metrica" + order: 0 + type: "string" + source-yandex-metrica-update: + title: "Yandex Metrica Spec" + type: "object" + required: + - "auth_token" + - "counter_id" + - "start_date" + properties: + auth_token: + type: "string" + title: "Authentication Token" + description: "Your Yandex Metrica API access token" + airbyte_secret: true + order: 0 + counter_id: + type: "string" + title: "Counter ID" + description: "Counter ID" + pattern: "^[0-9]+$" + order: 1 + start_date: + title: "Start Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\"." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 2 + end_date: + title: "End Date" + type: "string" + description: + "Starting point for your data replication, in format of \"\ + YYYY-MM-DD\". If not provided will sync till most recent date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-01-01" + order: 3 + source-vwo: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "vwo" + const: "vwo" + enum: + - "vwo" + order: 0 + type: "string" + source-vwo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-aircall: + type: "object" + required: + - "api_id" + - "api_token" + - "start_date" + - "sourceType" + properties: + api_id: + type: "string" + description: "App ID found at settings https://dashboard.aircall.io/integrations/api-keys" + title: "API ID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + api_token: + type: "string" + description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)" + title: "API Token" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." 
+ title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + sourceType: + title: "aircall" + const: "aircall" + enum: + - "aircall" + order: 0 + type: "string" + source-aircall-update: + type: "object" + required: + - "api_id" + - "api_token" + - "start_date" + properties: + api_id: + type: "string" + description: "App ID found at settings https://dashboard.aircall.io/integrations/api-keys" + title: "API ID" + airbyte_secret: true + order: 0 + api_token: + type: "string" + description: "App token found at settings (Ref- https://dashboard.aircall.io/integrations/api-keys)" + title: "API Token" + airbyte_secret: true + order: 1 + start_date: + type: "string" + description: + "Date time filter for incremental filter, Specify which date\ + \ to extract from." + title: "Date-From Filter" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + examples: + - "2022-03-01T00:00:00.000Z" + format: "date-time" + order: 2 + source-clickup-api: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + include_closed_tasks: + type: "boolean" + description: + "Include or exclude closed tasks. By default, they are excluded.\ + \ See here." + order: 5 + title: "Include Closed Tasks" + default: false + sourceType: + title: "clickup-api" + const: "clickup-api" + enum: + - "clickup-api" + order: 0 + type: "string" + source-clickup-api-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Every ClickUp API call required authentication. This field\ + \ is your personal API token. See here." 
+ order: 0 + airbyte_secret: true + include_closed_tasks: + type: "boolean" + description: + "Include or exclude closed tasks. By default, they are excluded.\ + \ See here." + order: 5 + title: "Include Closed Tasks" + default: false + source-ezofficeinventory: + type: "object" + required: + - "api_key" + - "subdomain" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your EZOfficeInventory Access Token. API Access is disabled\ + \ by default. Enable API Access in Settings > Integrations > API Integration\ + \ and click on Update to generate a new access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + description: + "The company name used in signup, also visible in the URL when\ + \ logged in." + name: "subdomain" + order: 1 + title: "Subdomain" + airbyte_secret: false + x-speakeasy-param-sensitive: true + start_date: + type: "string" + description: + "Earliest date you want to sync historical streams (inventory_histories,\ + \ asset_histories, asset_stock_histories) from" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "ezofficeinventory" + const: "ezofficeinventory" + enum: + - "ezofficeinventory" + order: 0 + type: "string" + source-ezofficeinventory-update: + type: "object" + required: + - "api_key" + - "subdomain" + - "start_date" + properties: + api_key: + type: "string" + description: + "Your EZOfficeInventory Access Token. API Access is disabled\ + \ by default. Enable API Access in Settings > Integrations > API Integration\ + \ and click on Update to generate a new access token" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + subdomain: + type: "string" + description: + "The company name used in signup, also visible in the URL when\ + \ logged in." 
+ name: "subdomain" + order: 1 + title: "Subdomain" + airbyte_secret: false + start_date: + type: "string" + description: + "Earliest date you want to sync historical streams (inventory_histories,\ + \ asset_histories, asset_stock_histories) from" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-snapchat-marketing: + title: "Snapchat Marketing Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + description: + "Date in the format 2022-01-01. Any data before this date will\ + \ not be replicated." + examples: + - "2022-01-01" + default: "2022-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "Date in the format 2017-01-25. Any data after this date will\ + \ not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-30" + order: 4 + format: "date" + action_report_time: + type: "string" + enum: + - "conversion" + - "impression" + title: "Action Report Time" + description: "Specifies the principle for conversion reporting." 
+ default: "conversion" + order: 5 + swipe_up_attribution_window: + type: "string" + title: "Swipe Up Attribution Window" + description: "Attribution window for swipe ups." + enum: + - "1_DAY" + - "7_DAY" + - "28_DAY" + default: "28_DAY" + order: 6 + view_attribution_window: + type: "string" + title: "View Attribution Window" + description: "Attribution window for views." + enum: + - "1_HOUR" + - "3_HOUR" + - "6_HOUR" + - "1_DAY" + - "7_DAY" + default: "1_DAY" + order: 7 + sourceType: + title: "snapchat-marketing" + const: "snapchat-marketing" + enum: + - "snapchat-marketing" + order: 0 + type: "string" + source-snapchat-marketing-update: + title: "Snapchat Marketing Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." + airbyte_secret: true + order: 1 + refresh_token: + title: "Refresh Token" + type: "string" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 2 + start_date: + title: "Start Date" + type: "string" + description: + "Date in the format 2022-01-01. Any data before this date will\ + \ not be replicated." + examples: + - "2022-01-01" + default: "2022-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 3 + format: "date" + end_date: + type: "string" + title: "End Date" + description: + "Date in the format 2017-01-25. Any data after this date will\ + \ not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "2022-01-30" + order: 4 + format: "date" + action_report_time: + type: "string" + enum: + - "conversion" + - "impression" + title: "Action Report Time" + description: "Specifies the principle for conversion reporting." 
+ default: "conversion" + order: 5 + swipe_up_attribution_window: + type: "string" + title: "Swipe Up Attribution Window" + description: "Attribution window for swipe ups." + enum: + - "1_DAY" + - "7_DAY" + - "28_DAY" + default: "28_DAY" + order: 6 + view_attribution_window: + type: "string" + title: "View Attribution Window" + description: "Attribution window for views." + enum: + - "1_HOUR" + - "3_HOUR" + - "6_HOUR" + - "1_DAY" + - "7_DAY" + default: "1_DAY" + order: 7 + source-gitlab: + title: "Source Gitlab Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for GitLab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\ + \ will be replicated. All data generated after this date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + api_url: + type: "string" + examples: + - "gitlab.com" + - "https://gitlab.com" + - "https://gitlab.company.org" + title: "API URL" + default: "gitlab.com" + description: "Please enter your basic URL from GitLab instance." + order: 2 + groups_list: + type: "array" + items: + type: "string" + examples: + - "airbyte.io" + title: "Groups" + description: "List of groups. e.g. airbyte.io." + order: 3 + projects_list: + type: "array" + items: + type: "string" + title: "Projects" + examples: + - "airbyte.io/documentation" + description: + "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab." 
+ order: 4 + sourceType: + title: "gitlab" + const: "gitlab" + enum: + - "gitlab" + order: 0 + type: "string" + source-gitlab-update: + title: "Source Gitlab Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Gitlab account and then generate a personal\ + \ Access Token." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for GitLab\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Optional. If not set, all data\ + \ will be replicated. All data generated after this date will be replicated." 
+ examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + api_url: + type: "string" + examples: + - "gitlab.com" + - "https://gitlab.com" + - "https://gitlab.company.org" + title: "API URL" + default: "gitlab.com" + description: "Please enter your basic URL from GitLab instance." + order: 2 + groups_list: + type: "array" + items: + type: "string" + examples: + - "airbyte.io" + title: "Groups" + description: "List of groups. e.g. airbyte.io." + order: 3 + projects_list: + type: "array" + items: + type: "string" + title: "Projects" + examples: + - "airbyte.io/documentation" + description: + "Space-delimited list of projects. e.g. airbyte.io/documentation\ + \ meltano/tap-gitlab." + order: 4 + source-launchdarkly: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + title: "Access token" + description: + "Your Access token. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "launchdarkly" + const: "launchdarkly" + enum: + - "launchdarkly" + order: 0 + type: "string" + source-launchdarkly-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + title: "Access token" + description: + "Your Access token. See here." + airbyte_secret: true + order: 0 + source-snowflake: + title: "Snowflake Source Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Key Pair Authentication" + type: "object" + order: 1 + required: + - "username" + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + order: 0 + enum: + - "Key Pair Authentication" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 2 + properties: + auth_type: + type: "string" + const: "username/password" + order: 0 + enum: + - "username/password" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." 
+ examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + x-speakeasy-param-sensitive: true + order: 0 + host: + description: + "The host domain of the snowflake instance (must include the\ + \ account, region, cloud environment, and end with snowflakecomputing.com)." + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + type: "string" + title: "Account Name" + order: 1 + role: + description: "The role you created for Airbyte to access Snowflake." + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 2 + warehouse: + description: "The warehouse you created for Airbyte to access data." + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 3 + database: + description: "The database you created for Airbyte to access data." + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 4 + schema: + description: + "The source Snowflake schema tables. Leave empty to access\ + \ tables from multiple schemas." + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Schema" + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + sourceType: + title: "snowflake" + const: "snowflake" + enum: + - "snowflake" + order: 0 + type: "string" + source-snowflake-update: + title: "Snowflake Source Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + order: 0 + required: + - "client_id" + - "client_secret" + - "auth_type" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 3 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token for making authenticated requests." + airbyte_secret: true + order: 4 + - title: "Key Pair Authentication" + type: "object" + order: 1 + required: + - "username" + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + order: 0 + enum: + - "Key Pair Authentication" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." 
+ multiline: true + airbyte_secret: true + order: 2 + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + order: 3 + - title: "Username and Password" + type: "object" + required: + - "username" + - "password" + - "auth_type" + order: 2 + properties: + auth_type: + type: "string" + const: "username/password" + order: 0 + enum: + - "username/password" + username: + description: + "The username you created to allow Airbyte to access\ + \ the database." + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 1 + password: + description: "The password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + order: 2 + order: 0 + host: + description: + "The host domain of the snowflake instance (must include the\ + \ account, region, cloud environment, and end with snowflakecomputing.com)." + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + type: "string" + title: "Account Name" + order: 1 + role: + description: "The role you created for Airbyte to access Snowflake." + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 2 + warehouse: + description: "The warehouse you created for Airbyte to access data." + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 3 + database: + description: "The database you created for Airbyte to access data." + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 4 + schema: + description: + "The source Snowflake schema tables. Leave empty to access\ + \ tables from multiple schemas." + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Schema" + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + source-auth0: + title: "Auth0 Management API Spec" + type: "object" + required: + - "base_url" + - "credentials" + - "sourceType" + properties: + base_url: + type: "string" + title: "Base URL" + examples: + - "https://dev-yourOrg.us.auth0.com/" + description: + "The Authentication API is served over HTTPS. All URLs referenced\ + \ in the documentation have the following base `https://YOUR_DOMAIN`" + credentials: + title: "Authentication Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2 Confidential Application" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "audience" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_confidential_application" + order: 0 + enum: + - "oauth2_confidential_application" + client_id: + title: "Client ID" + description: + "Your application's Client ID. You can find this value\ + \ on the application's\ + \ settings tab after you login the admin portal." + type: "string" + examples: + - "Client_ID" + client_secret: + title: "Client Secret" + description: + "Your application's Client Secret. You can find this\ + \ value on the application's settings tab after you login the admin portal." + type: "string" + examples: + - "Client_Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + audience: + title: "Audience" + description: + "The audience for the token, which is your API. 
You can\ + \ find this in the Identifier field on your API's settings tab" + type: "string" + examples: + - "https://dev-yourOrg.us.auth0.com/api/v2/" + - type: "object" + title: "OAuth2 Access Token" + required: + - "access_token" + - "auth_type" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_access_token" + examples: + - "oauth2_access_token" + order: 0 + enum: + - "oauth2_access_token" + access_token: + title: "OAuth2 Access Token" + description: + "Also called API Access Token The access token used to call the Auth0 Management\ + \ API Token. It's a JWT that contains specific grant permissions\ + \ knowns as scopes." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "auth0" + const: "auth0" + enum: + - "auth0" + order: 0 + type: "string" + source-auth0-update: + title: "Auth0 Management API Spec" + type: "object" + required: + - "base_url" + - "credentials" + properties: + base_url: + type: "string" + title: "Base URL" + examples: + - "https://dev-yourOrg.us.auth0.com/" + description: + "The Authentication API is served over HTTPS. 
All URLs referenced\ + \ in the documentation have the following base `https://YOUR_DOMAIN`" + credentials: + title: "Authentication Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2 Confidential Application" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "audience" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_confidential_application" + order: 0 + enum: + - "oauth2_confidential_application" + client_id: + title: "Client ID" + description: + "Your application's Client ID. You can find this value\ + \ on the application's\ + \ settings tab after you login the admin portal." + type: "string" + examples: + - "Client_ID" + client_secret: + title: "Client Secret" + description: + "Your application's Client Secret. You can find this\ + \ value on the application's settings tab after you login the admin portal." + type: "string" + examples: + - "Client_Secret" + airbyte_secret: true + audience: + title: "Audience" + description: + "The audience for the token, which is your API. You can\ + \ find this in the Identifier field on your API's settings tab" + type: "string" + examples: + - "https://dev-yourOrg.us.auth0.com/api/v2/" + - type: "object" + title: "OAuth2 Access Token" + required: + - "access_token" + - "auth_type" + properties: + auth_type: + type: "string" + title: "Authentication Method" + const: "oauth2_access_token" + examples: + - "oauth2_access_token" + order: 0 + enum: + - "oauth2_access_token" + access_token: + title: "OAuth2 Access Token" + description: + "Also called API Access Token The access token used to call the Auth0 Management\ + \ API Token. It's a JWT that contains specific grant permissions\ + \ knowns as scopes." + type: "string" + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false + source-linnworks: + title: "Linnworks Spec" + type: "object" + required: + - "application_id" + - "application_secret" + - "token" + - "start_date" + - "sourceType" + properties: + application_id: + title: "Application ID." + description: "Linnworks Application ID" + type: "string" + application_secret: + title: "Application Secret" + description: "Linnworks Application Secret" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + token: + title: "API Token" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + type: "string" + format: "date-time" + sourceType: + title: "linnworks" + const: "linnworks" + enum: + - "linnworks" + order: 0 + type: "string" + source-linnworks-update: + title: "Linnworks Spec" + type: "object" + required: + - "application_id" + - "application_secret" + - "token" + - "start_date" + properties: + application_id: + title: "Application ID." + description: "Linnworks Application ID" + type: "string" + application_secret: + title: "Application Secret" + description: "Linnworks Application Secret" + type: "string" + airbyte_secret: true + token: + title: "API Token" + type: "string" + airbyte_secret: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + type: "string" + format: "date-time" + source-microsoft-sharepoint: + title: "Microsoft SharePoint Source Spec" + description: + "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the SharePoint API." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access,\ + \ 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to\ + \ search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." + order: 4 + type: "string" + sourceType: + title: "microsoft-sharepoint" + const: "microsoft-sharepoint" + enum: + - "microsoft-sharepoint" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "sourceType" + source-microsoft-sharepoint-update: + title: "Microsoft SharePoint Source Spec" + description: + "SourceMicrosoftSharePointSpec class for Microsoft SharePoint Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the SharePoint API." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft SharePoint user" + airbyte_secret: true + type: "string" + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' for all SharePoint drives the user can access,\ + \ 'SHARED_ITEMS' for shared items the user has access to, and 'ALL' to\ + \ search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." 
+ order: 4 + type: "string" + required: + - "streams" + - "credentials" + source-amazon-sqs: + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + - "sourceType" + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: + "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. 
" + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: + "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Mesage Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: + "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." + type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + x-speakeasy-param-sensitive: true + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + x-speakeasy-param-sensitive: true + sourceType: + title: "amazon-sqs" + const: "amazon-sqs" + enum: + - "amazon-sqs" + order: 0 + type: "string" + source-amazon-sqs-update: + title: "Amazon SQS Source Spec" + type: "object" + required: + - "queue_url" + - "region" + - "delete_messages" + properties: + queue_url: + title: "Queue URL" + description: "URL of the SQS Queue" + type: "string" + examples: + - "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" + order: 0 + region: + title: "AWS Region" + description: "AWS Region of the SQS Queue" + type: "string" + enum: + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + 
- "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 1 + delete_messages: + title: "Delete Messages After Read" + description: + "If Enabled, messages will be deleted from the SQS Queue after\ + \ being read. If Disabled, messages are left in the queue and can be read\ + \ more than once. WARNING: Enabling this option can result in data loss\ + \ in cases of failure, use with caution, see documentation for more detail. " + type: "boolean" + default: false + order: 2 + max_batch_size: + title: "Max Batch Size" + description: "Max amount of messages to get in one batch (10 max)" + type: "integer" + examples: + - "5" + order: 3 + max_wait_time: + title: "Max Wait Time" + description: + "Max amount of time in seconds to wait for messages in a single\ + \ poll (20 max)" + type: "integer" + examples: + - "5" + order: 4 + attributes_to_return: + title: "Message Attributes To Return" + description: "Comma separated list of Mesage Attribute names to return" + type: "string" + examples: + - "attr1,attr2" + order: 5 + visibility_timeout: + title: "Message Visibility Timeout" + description: + "Modify the Visibility Timeout of the individual message from\ + \ the Queue's default (seconds)." 
+ type: "integer" + examples: + - "15" + order: 6 + access_key: + title: "AWS IAM Access Key ID" + description: "The Access Key ID of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "xxxxxHRNxxx3TBxxxxxx" + airbyte_secret: true + order: 7 + secret_key: + title: "AWS IAM Secret Key" + description: "The Secret Key of the AWS IAM Role to use for pulling messages" + type: "string" + examples: + - "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" + airbyte_secret: true + order: 8 + source-sonar-cloud: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + - "sourceType" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." + examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." + examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "sonar-cloud" + const: "sonar-cloud" + enum: + - "sonar-cloud" + order: 0 + type: "string" + source-sonar-cloud-update: + type: "object" + required: + - "component_keys" + - "organization" + - "user_token" + properties: + component_keys: + type: "array" + title: "Component Keys" + description: "Comma-separated list of component keys." 
+ examples: + - "airbyte-ws-order" + - "airbyte-ws-checkout" + order: 0 + end_date: + type: "string" + title: "End date" + description: "To retrieve issues created before the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 1 + organization: + type: "string" + title: "Organization" + description: + "Organization key. See here." + examples: + - "airbyte" + order: 2 + start_date: + type: "string" + title: "Start date" + description: "To retrieve issues created after the given date (inclusive)." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + examples: + - "YYYY-MM-DD" + format: "date" + order: 3 + user_token: + type: "string" + title: "User Token" + description: + "Your User Token. See here. The token is case sensitive." + airbyte_secret: true + order: 4 + source-clockify: + type: "object" + required: + - "api_key" + - "workspace_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "You can get your api access_key here This API is Case Sensitive." + order: 0 + x-speakeasy-param-sensitive: true + api_url: + type: "string" + title: "API Url" + description: + "The URL for the Clockify API. This should only need to be\ + \ modified if connecting to an enterprise version of Clockify." + default: "https://api.clockify.me" + order: 1 + workspace_id: + type: "string" + title: "Workspace Id" + description: "WorkSpace Id" + order: 2 + sourceType: + title: "clockify" + const: "clockify" + enum: + - "clockify" + order: 0 + type: "string" + source-clockify-update: + type: "object" + required: + - "api_key" + - "workspace_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "You can get your api access_key here This API is Case Sensitive." + order: 0 + api_url: + type: "string" + title: "API Url" + description: + "The URL for the Clockify API. 
This should only need to be\ + \ modified if connecting to an enterprise version of Clockify." + default: "https://api.clockify.me" + order: 1 + workspace_id: + type: "string" + title: "Workspace Id" + description: "WorkSpace Id" + order: 2 + source-marketo: + title: "Source Marketo Spec" + type: "object" + required: + - "domain_url" + - "client_id" + - "client_secret" + - "start_date" + - "sourceType" + properties: + domain_url: + title: "Domain URL" + type: "string" + order: 3 + description: + "Your Marketo Base URL. See the docs for info on how to obtain this." + examples: + - "https://000-AAA-000.mktorest.com" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Marketo developer application. See the\ + \ docs for info on how to obtain this." + order: 0 + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Marketo developer application. See\ + \ the\ + \ docs for info on how to obtain this." + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + type: "string" + order: 2 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-09-25T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + sourceType: + title: "marketo" + const: "marketo" + enum: + - "marketo" + order: 0 + type: "string" + source-marketo-update: + title: "Source Marketo Spec" + type: "object" + required: + - "domain_url" + - "client_id" + - "client_secret" + - "start_date" + properties: + domain_url: + title: "Domain URL" + type: "string" + order: 3 + description: + "Your Marketo Base URL. See the docs for info on how to obtain this." 
+ examples: + - "https://000-AAA-000.mktorest.com" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Marketo developer application. See the\ + \ docs for info on how to obtain this." + order: 0 + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Marketo developer application. See\ + \ the\ + \ docs for info on how to obtain this." + order: 1 + airbyte_secret: true + start_date: + title: "Start Date" + type: "string" + order: 2 + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-09-25T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + source-pocket: + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + - "sourceType" + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: + "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." + order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." 
+ order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: + "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + sourceType: + title: "pocket" + const: "pocket" + enum: + - "pocket" + order: 0 + type: "string" + source-pocket-update: + title: "Pocket Spec" + type: "object" + required: + - "consumer_key" + - "access_token" + properties: + consumer_key: + type: "string" + title: "Consumer Key" + description: "Your application's Consumer Key." + airbyte_secret: true + order: 0 + access_token: + type: "string" + title: "Access Token" + description: "The user's Pocket access token." + airbyte_secret: true + order: 1 + state: + type: "string" + title: "State" + description: "Select the state of the items to retrieve." + order: 2 + enum: + - "unread" + - "archive" + - "all" + favorite: + type: "boolean" + title: "Is Favorite?" + description: "Retrieve only favorited items." + default: false + order: 3 + tag: + type: "string" + title: "Tag Name" + description: + "Return only items tagged with this tag name. Use _untagged_\ + \ for retrieving only untagged items." 
+ order: 4 + content_type: + type: "string" + title: "Content Type" + description: "Select the content type of the items to retrieve." + order: 5 + enum: + - "article" + - "video" + - "image" + sort: + type: "string" + title: "Sort By" + description: "Sort retrieved items by the given criteria." + order: 6 + enum: + - "newest" + - "oldest" + - "title" + - "site" + detail_type: + type: "string" + title: "Detail Type" + description: "Select the granularity of the information about each item." + order: 7 + enum: + - "simple" + - "complete" + search: + type: "string" + title: "Search Query" + description: + "Only return items whose title or url contain the `search`\ + \ string." + order: 8 + domain: + type: "string" + title: "Domain" + description: "Only return items from a particular `domain`." + order: 9 + since: + type: "string" + title: "Since" + description: "Only return items modified since the given timestamp." + pattern: "[0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}:[0-9]{2}" + examples: + - "2022-10-20 14:14:14" + order: 10 + source-productboard: + type: "object" + required: + - "access_token" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Your Productboard access token. See https://developer.productboard.com/reference/authentication\ + \ for steps to generate one." + name: "api_key" + order: 0 + title: "Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "productboard" + const: "productboard" + enum: + - "productboard" + order: 0 + type: "string" + source-productboard-update: + type: "object" + required: + - "access_token" + - "start_date" + properties: + access_token: + type: "string" + description: + "Your Productboard access token. 
See https://developer.productboard.com/reference/authentication\ + \ for steps to generate one." + name: "api_key" + order: 0 + title: "Access Token" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-smartsheets: + title: "Smartsheets Source Spec" + type: "object" + required: + - "credentials" + - "spreadsheet_id" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Access Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token to use for accessing your data from\ + \ Smartsheets. 
This access token must be generated by a user with\ + \ at least read access to the data you'd like to replicate. Generate\ + \ an access token in the Smartsheets main menu by clicking Account\ + \ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + spreadsheet_id: + title: "Sheet ID" + description: + "The spreadsheet ID. Find it by opening the spreadsheet then\ + \ navigating to File > Properties" + type: "string" + order: 1 + metadata_fields: + title: "Metadata Fields" + type: "array" + items: + title: "Validenums" + enum: + - "sheetcreatedAt" + - "sheetid" + - "sheetmodifiedAt" + - "sheetname" + - "sheetpermalink" + - "sheetversion" + - "sheetaccess_level" + - "row_id" + - "row_access_level" + - "row_created_at" + - "row_created_by" + - "row_expanded" + - "row_modified_by" + - "row_parent_id" + - "row_permalink" + - "row_number" + - "row_version" + description: "A List of available columns which metadata can be pulled from." + order: 3 + sourceType: + title: "smartsheets" + const: "smartsheets" + enum: + - "smartsheets" + order: 0 + type: "string" + source-smartsheets-update: + title: "Smartsheets Source Spec" + type: "object" + required: + - "credentials" + - "spreadsheet_id" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." 
+ airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "API Access Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token to use for accessing your data from\ + \ Smartsheets. This access token must be generated by a user with\ + \ at least read access to the data you'd like to replicate. Generate\ + \ an access token in the Smartsheets main menu by clicking Account\ + \ > Apps & Integrations > API Access. See the setup guide for information on how to obtain this token." + airbyte_secret: true + spreadsheet_id: + title: "Sheet ID" + description: + "The spreadsheet ID. Find it by opening the spreadsheet then\ + \ navigating to File > Properties" + type: "string" + order: 1 + metadata_fields: + title: "Metadata Fields" + type: "array" + items: + title: "Validenums" + enum: + - "sheetcreatedAt" + - "sheetid" + - "sheetmodifiedAt" + - "sheetname" + - "sheetpermalink" + - "sheetversion" + - "sheetaccess_level" + - "row_id" + - "row_access_level" + - "row_created_at" + - "row_created_by" + - "row_expanded" + - "row_modified_by" + - "row_parent_id" + - "row_permalink" + - "row_number" + - "row_version" + description: "A List of available columns which metadata can be pulled from." + order: 3 + source-lob: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API key to use for authentication. You can find your account's\ + \ API keys in your Dashboard Settings at https://dashboard.lob.com/settings/api-keys." 
+ name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + sourceType: + title: "lob" + const: "lob" + enum: + - "lob" + order: 0 + type: "string" + source-lob-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "API key to use for authentication. You can find your account's\ + \ API keys in your Dashboard Settings at https://dashboard.lob.com/settings/api-keys." + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + source-iterable: + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." 
+ examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + sourceType: + title: "iterable" + const: "iterable" + enum: + - "iterable" + order: 0 + type: "string" + source-iterable-update: + title: "Iterable Spec" + type: "object" + required: + - "start_date" + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Iterable API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Iterable,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2021-04-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + format: "date-time" + source-mysql: + title: "MySql Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "replication_method" + - "sourceType" + properties: + host: + description: "The host name of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port to connect to." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + description: "The database name." + title: "Database" + type: "string" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + always_show: true + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3). For\ + \ more information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. Read more in the docs." + type: "object" + order: 7 + oneOf: + - title: "preferred" + description: + "Automatically attempt SSL connection. If the MySQL server\ + \ does not support SSL, continue with a regular connection." + required: + - "mode" + properties: + mode: + type: "string" + const: "preferred" + order: 0 + enum: + - "preferred" + - title: "required" + description: + "Always connect with SSL. If the MySQL server doesn’t support\ + \ SSL, the connection will not be established. Certificate Authority\ + \ (CA) and Hostname are not verified." + required: + - "mode" + properties: + mode: + type: "string" + const: "required" + order: 0 + enum: + - "required" + - title: "Verify CA" + description: + "Always connect with SSL. Verifies CA, but allows connection\ + \ even if Hostname does not match." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_ca" + order: 0 + enum: + - "verify_ca" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Verify Identity" + description: "Always connect with SSL. Verify both CA and Hostname." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_identity" + order: 0 + enum: + - "verify_identity" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 8 + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Binary Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the MySQL binary log. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 1 + always_show: true + server_time_zone: + type: "string" + title: "Configured server timezone for the MySQL source (Advanced)" + description: + "Enter the configured MySQL server timezone. This should\ + \ only be done if the configured timezone in your MySQL instance\ + \ does not conform to IANNA standard." + order: 2 + always_show: true + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 3 + always_show: true + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 4 + always_show: true + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "mysql" + const: "mysql" + enum: + - "mysql" + order: 0 + type: "string" + source-mysql-update: + title: "MySql Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "replication_method" + properties: + host: + description: "The host name of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port to connect to." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + description: "The database name." + title: "Database" + type: "string" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + always_show: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). For\ + \ more information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." 
+ type: "boolean" + default: true + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. Read more in the docs." + type: "object" + order: 7 + oneOf: + - title: "preferred" + description: + "Automatically attempt SSL connection. If the MySQL server\ + \ does not support SSL, continue with a regular connection." + required: + - "mode" + properties: + mode: + type: "string" + const: "preferred" + order: 0 + enum: + - "preferred" + - title: "required" + description: + "Always connect with SSL. If the MySQL server doesn’t support\ + \ SSL, the connection will not be established. Certificate Authority\ + \ (CA) and Hostname are not verified." + required: + - "mode" + properties: + mode: + type: "string" + const: "required" + order: 0 + enum: + - "required" + - title: "Verify CA" + description: + "Always connect with SSL. Verifies CA, but allows connection\ + \ even if Hostname does not match." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_ca" + order: 0 + enum: + - "verify_ca" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." 
+ airbyte_secret: true + order: 4 + - title: "Verify Identity" + description: "Always connect with SSL. Verify both CA and Hostname." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify_identity" + order: 0 + enum: + - "verify_identity" + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: + "Client certificate (this is not a required field, but\ + \ if you want to use it, you will need to add the Client key\ + \ as well)" + airbyte_secret: true + multiline: true + order: 2 + always_show: true + client_key: + type: "string" + title: "Client key" + description: + "Client key (this is not a required field, but if you\ + \ want to use it, you will need to add the Client certificate\ + \ as well)" + airbyte_secret: true + multiline: true + order: 3 + always_show: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + order: 8 + default: "CDC" + display_type: "radio" + oneOf: + - title: "Read Changes using Binary Log (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the MySQL binary log. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. 
Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 1 + always_show: true + server_time_zone: + type: "string" + title: "Configured server timezone for the MySQL source (Advanced)" + description: + "Enter the configured MySQL server timezone. This should\ + \ only be done if the configured timezone in your MySQL instance\ + \ does not conform to IANNA standard." + order: 2 + always_show: true + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 3 + always_show: true + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 4 + always_show: true + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-rollbar: + type: "object" + required: + - "project_access_token" + - "start_date" + - "account_access_token" + - "sourceType" + properties: + project_access_token: + type: "string" + name: "api_key" + title: "Project Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + account_access_token: + type: "string" + title: "Account Access Token" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "rollbar" + const: "rollbar" + enum: + - "rollbar" + order: 0 + type: "string" + source-rollbar-update: + type: "object" + required: + - "project_access_token" + - "start_date" + - "account_access_token" + properties: + project_access_token: + type: "string" + name: "api_key" + title: "Project Access Token" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + account_access_token: + type: "string" + title: "Account Access Token" + airbyte_secret: true + order: 2 + source-emailoctopus: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "EmailOctopus API key" + description: + "EmailOctopus 
API Key. See the docs for information on how to generate this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "emailoctopus" + const: "emailoctopus" + enum: + - "emailoctopus" + order: 0 + type: "string" + source-emailoctopus-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "EmailOctopus API key" + description: + "EmailOctopus API Key. See the docs for information on how to generate this key." + airbyte_secret: true + order: 0 + source-railz: + title: "Railz Spec" + type: "object" + required: + - "client_id" + - "secret_key" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID (client_id)" + order: 0 + secret_key: + type: "string" + title: "Secret key" + description: "Secret key (secret_key)" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + description: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + sourceType: + title: "railz" + const: "railz" + enum: + - "railz" + order: 0 + type: "string" + source-railz-update: + title: "Railz Spec" + type: "object" + required: + - "client_id" + - "secret_key" + - "start_date" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID (client_id)" + order: 0 + secret_key: + type: "string" + title: "Secret key" + description: "Secret key (secret_key)" + order: 1 + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + description: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + source-orbit: + type: "object" + required: + - "api_token" + - "workspace" + - "sourceType" + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: + "Authorizes you to work with Orbit workspaces associated with\ + \ the token." 
+ order: 0 + x-speakeasy-param-sensitive: true + workspace: + type: "string" + title: "Workspace" + description: + "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + sourceType: + title: "orbit" + const: "orbit" + enum: + - "orbit" + order: 0 + type: "string" + source-orbit-update: + type: "object" + required: + - "api_token" + - "workspace" + properties: + api_token: + type: "string" + airbyte_secret: true + title: "API Token" + description: + "Authorizes you to work with Orbit workspaces associated with\ + \ the token." + order: 0 + workspace: + type: "string" + title: "Workspace" + description: + "The unique name of the workspace that your API token is associated\ + \ with." + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "Date in the format 2022-06-26. Only load members whose last\ + \ activities are after this date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + source-apify-dataset: + title: "Apify Dataset Spec" + type: "object" + required: + - "token" + - "dataset_id" + - "sourceType" + properties: + token: + type: "string" + title: "API token" + description: + "Personal API token of your Apify account. In Apify Console,\ + \ you can find your API token in the Settings section under the Integrations tab after you login. See\ + \ the Apify Docs for more information." + examples: + - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk" + airbyte_secret: true + x-speakeasy-param-sensitive: true + dataset_id: + type: "string" + title: "Dataset ID" + description: + "ID of the dataset you would like to load to Airbyte. In Apify\ + \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. 
See the Apify Docs\ + \ for more information." + examples: + - "rHuMdwm6xCFt6WiGU" + sourceType: + title: "apify-dataset" + const: "apify-dataset" + enum: + - "apify-dataset" + order: 0 + type: "string" + source-apify-dataset-update: + title: "Apify Dataset Spec" + type: "object" + required: + - "token" + - "dataset_id" + properties: + token: + type: "string" + title: "API token" + description: + "Personal API token of your Apify account. In Apify Console,\ + \ you can find your API token in the Settings section under the Integrations tab after you login. See\ + \ the Apify Docs for more information." + examples: + - "apify_api_PbVwb1cBbuvbfg2jRmAIHZKgx3NQyfEMG7uk" + airbyte_secret: true + dataset_id: + type: "string" + title: "Dataset ID" + description: + "ID of the dataset you would like to load to Airbyte. In Apify\ + \ Console, you can view your datasets in the Storage section under the Datasets tab after you login. See the Apify Docs\ + \ for more information." + examples: + - "rHuMdwm6xCFt6WiGU" + source-confluence: + type: "object" + required: + - "email" + - "api_token" + - "domain_name" + - "sourceType" + properties: + email: + type: "string" + title: "Email" + description: "Your Confluence login email" + examples: + - "abc@example.com" + order: 0 + api_token: + type: "string" + title: "API Token" + description: + "Please follow the Jira confluence for generating an API token:\ + \ generating an API token." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + domain_name: + type: "string" + title: "Domain name" + description: "Your Confluence domain name" + order: 2 + sourceType: + title: "confluence" + const: "confluence" + enum: + - "confluence" + order: 0 + type: "string" + source-confluence-update: + type: "object" + required: + - "email" + - "api_token" + - "domain_name" + properties: + email: + type: "string" + title: "Email" + description: "Your Confluence login email" + examples: + - "abc@example.com" + order: 0 + api_token: + type: "string" + title: "API Token" + description: + "Please follow the Jira confluence for generating an API token:\ + \ generating an API token." + airbyte_secret: true + order: 1 + domain_name: + type: "string" + title: "Domain name" + description: "Your Confluence domain name" + order: 2 + source-coin-api: + title: "Coin API Spec" + type: "object" + required: + - "api_key" + - "environment" + - "symbol_id" + - "period" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + environment: + type: "string" + description: "The environment to use. Either sandbox or production.\n" + enum: + - "sandbox" + - "production" + default: "sandbox" + order: 1 + symbol_id: + type: "string" + description: + "The symbol ID to use. See the documentation for a list.\n\ + https://docs.coinapi.io/#list-all-symbols-get\n" + order: 2 + period: + type: "string" + description: "The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get" + examples: + - "5SEC" + - "2MTH" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: "The start date in ISO 8601 format." 
+ examples: + - "2019-01-01T00:00:00" + end_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: + "The end date in ISO 8601 format. If not supplied, data will\ + \ be returned\nfrom the start date to the current time, or when the count\ + \ of result\nelements reaches its limit.\n" + examples: + - "2019-01-01T00:00:00" + limit: + type: "integer" + description: + "The maximum number of elements to return. If not supplied,\ + \ the default\nis 100. For numbers larger than 100, each 100 items is\ + \ counted as one\nrequest for pricing purposes. Maximum value is 100000.\n" + minimum: 1 + maximum: 100000 + default: 100 + sourceType: + title: "coin-api" + const: "coin-api" + enum: + - "coin-api" + order: 0 + type: "string" + source-coin-api-update: + title: "Coin API Spec" + type: "object" + required: + - "api_key" + - "environment" + - "symbol_id" + - "period" + - "start_date" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + order: 0 + environment: + type: "string" + description: "The environment to use. Either sandbox or production.\n" + enum: + - "sandbox" + - "production" + default: "sandbox" + order: 1 + symbol_id: + type: "string" + description: + "The symbol ID to use. See the documentation for a list.\n\ + https://docs.coinapi.io/#list-all-symbols-get\n" + order: 2 + period: + type: "string" + description: "The period to use. See the documentation for a list. https://docs.coinapi.io/#list-all-periods-get" + examples: + - "5SEC" + - "2MTH" + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: "The start date in ISO 8601 format." + examples: + - "2019-01-01T00:00:00" + end_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}$" + description: + "The end date in ISO 8601 format. 
If not supplied, data will\ + \ be returned\nfrom the start date to the current time, or when the count\ + \ of result\nelements reaches its limit.\n" + examples: + - "2019-01-01T00:00:00" + limit: + type: "integer" + description: + "The maximum number of elements to return. If not supplied,\ + \ the default\nis 100. For numbers larger than 100, each 100 items is\ + \ counted as one\nrequest for pricing purposes. Maximum value is 100000.\n" + minimum: 1 + maximum: 100000 + default: 100 + source-orb: + type: "object" + required: + - "start_date" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Orb API Key" + description: "Orb API Key, issued from the Orb admin console." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at before this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2022-03-01T00:00:00Z" + order: 1 + end_date: + type: "string" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at after this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2024-03-01T00:00:00Z" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N days. By default, updated objects that are\ + \ not newly created are not incrementally synced." 
+ order: 3 + string_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (string values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 4 + numeric_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (numeric values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 5 + subscription_usage_grouping_key: + type: "string" + title: "Subscription usage grouping key (string value)" + description: "Property key name to group subscription usage by." + order: 6 + plan_id: + type: "string" + title: "Orb Plan ID for Subscription Usage (string value)" + description: + "Orb Plan ID to filter subscriptions that should have usage\ + \ fetched." + order: 7 + sourceType: + title: "orb" + const: "orb" + enum: + - "orb" + order: 0 + type: "string" + source-orb-update: + type: "object" + required: + - "start_date" + - "api_key" + properties: + api_key: + type: "string" + title: "Orb API Key" + description: "Orb API Key, issued from the Orb admin console." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at before this data will not be synced. For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2022-03-01T00:00:00Z" + order: 1 + end_date: + type: "string" + title: "End Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2022-03-01T00:00:00Z. Any\ + \ data with created_at after this data will not be synced. 
For Subscription\ + \ Usage, this becomes the `timeframe_start` API parameter." + examples: + - "2024-03-01T00:00:00Z" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N days. By default, updated objects that are\ + \ not newly created are not incrementally synced." + order: 3 + string_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (string values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 4 + numeric_event_properties_keys: + type: "array" + items: + type: "string" + title: "Event properties keys (numeric values)" + description: + "Property key names to extract from all events, in order to\ + \ enrich ledger entries corresponding to an event deduction." + order: 5 + subscription_usage_grouping_key: + type: "string" + title: "Subscription usage grouping key (string value)" + description: "Property key name to group subscription usage by." + order: 6 + plan_id: + type: "string" + title: "Orb Plan ID for Subscription Usage (string value)" + description: + "Orb Plan ID to filter subscriptions that should have usage\ + \ fetched." 
+ order: 7 + source-sentry: + title: "Sentry Spec" + type: "object" + required: + - "auth_token" + - "organization" + - "project" + - "sourceType" + properties: + auth_token: + type: "string" + title: "Authentication Tokens" + description: + "Log into Sentry and then create authentication tokens.For self-hosted, you can find or create\ + \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\"" + airbyte_secret: true + x-speakeasy-param-sensitive: true + hostname: + type: "string" + title: "Host Name" + description: + "Host name of Sentry API server.For self-hosted, specify your\ + \ host name here. Otherwise, leave it empty." + default: "sentry.io" + organization: + type: "string" + title: "Organization" + description: "The slug of the organization the groups belong to." + project: + type: "string" + title: "Project" + description: "The name (slug) of the Project you want to sync." + discover_fields: + type: "array" + item: "string" + title: "Discover Event Fields" + description: "Fields to retrieve when fetching discover events" + sourceType: + title: "sentry" + const: "sentry" + enum: + - "sentry" + order: 0 + type: "string" + source-sentry-update: + title: "Sentry Spec" + type: "object" + required: + - "auth_token" + - "organization" + - "project" + properties: + auth_token: + type: "string" + title: "Authentication Tokens" + description: + "Log into Sentry and then create authentication tokens.For self-hosted, you can find or create\ + \ authentication tokens by visiting \"{instance_url_prefix}/settings/account/api/auth-tokens/\"" + airbyte_secret: true + hostname: + type: "string" + title: "Host Name" + description: + "Host name of Sentry API server.For self-hosted, specify your\ + \ host name here. Otherwise, leave it empty." + default: "sentry.io" + organization: + type: "string" + title: "Organization" + description: "The slug of the organization the groups belong to." 
+ project: + type: "string" + title: "Project" + description: "The name (slug) of the Project you want to sync." + discover_fields: + type: "array" + item: "string" + title: "Discover Event Fields" + description: "Fields to retrieve when fetching discover events" + source-notion: + title: "Notion Source Spec" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\ + \ During incremental sync, any data generated before this date will not\ + \ be replicated. If left blank, the start date will be set to 2 years\ + \ before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-11-16T00:00:00.000Z" + type: "string" + format: "date-time" + credentials: + title: "Authentication Method" + description: + "Choose either OAuth (recommended for Airbyte Cloud) or Access\ + \ Token. See our docs\ + \ for more information." + type: "object" + order: 1 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "The Access Token received by completing the OAuth flow\ + \ for your Notion integration. See our docs\ + \ for more information." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Access Token" + required: + - "auth_type" + - "token" + properties: + auth_type: + type: "string" + const: "token" + enum: + - "token" + token: + title: "Access Token" + description: + "The Access Token for your private Notion integration.\ + \ See the docs\ + \ for more information on how to obtain this token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "notion" + const: "notion" + enum: + - "notion" + order: 0 + type: "string" + source-notion-update: + title: "Notion Source Spec" + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SS.000Z.\ + \ During incremental sync, any data generated before this date will not\ + \ be replicated. If left blank, the start date will be set to 2 years\ + \ before the present date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-11-16T00:00:00.000Z" + type: "string" + format: "date-time" + credentials: + title: "Authentication Method" + description: + "Choose either OAuth (recommended for Airbyte Cloud) or Access\ + \ Token. See our docs\ + \ for more information." + type: "object" + order: 1 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." 
+ airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "The Access Token received by completing the OAuth flow\ + \ for your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + - type: "object" + title: "Access Token" + required: + - "auth_type" + - "token" + properties: + auth_type: + type: "string" + const: "token" + enum: + - "token" + token: + title: "Access Token" + description: + "The Access Token for your private Notion integration.\ + \ See the docs\ + \ for more information on how to obtain this token." + type: "string" + airbyte_secret: true + source-trustpilot: + title: "Trustpilot Spec" + type: "object" + required: + - "credentials" + - "business_units" + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth 2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "API key" + description: + "The API key of the Trustpilot API application. (represents\ + \ the OAuth Client ID)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Secret" + description: + "The Secret of the Trustpilot API application. (represents\ + \ the OAuth Client Secret)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + title: "Token expiry date time" + description: "The date-time when the access token should be refreshed." 
+ format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access_token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Key" + description: + "The API key authentication method gives you access to only\ + \ the streams which are part of the Public API. When you want to get\ + \ streams available via the Consumer API (e.g. the private reviews)\ + \ you need to use authentication method OAuth 2.0." + required: + - "client_id" + properties: + auth_type: + type: "string" + const: "apikey" + enum: + - "apikey" + client_id: + type: "string" + title: "API key" + description: "The API key of the Trustpilot API application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + business_units: + type: "array" + items: + type: "string" + title: "Business Unit names" + description: + "The names of business units which shall be synchronized. Some\ + \ streams e.g. configured_business_units or private_reviews use this configuration." + examples: + - "mydomain.com" + - "www.mydomain.com" + start_date: + type: "string" + title: "Start Date" + description: + "For streams with sync. 
method incremental the start date time\ + \ to be used" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "%Y-%m-%dT%H:%M:%SZ" + sourceType: + title: "trustpilot" + const: "trustpilot" + enum: + - "trustpilot" + order: 0 + type: "string" + source-trustpilot-update: + title: "Trustpilot Spec" + type: "object" + required: + - "credentials" + - "business_units" + - "start_date" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth 2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "API key" + description: + "The API key of the Trustpilot API application. (represents\ + \ the OAuth Client ID)" + airbyte_secret: true + client_secret: + type: "string" + title: "Secret" + description: + "The Secret of the Trustpilot API application. (represents\ + \ the OAuth Client Secret)" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + title: "Token expiry date time" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - type: "object" + title: "API Key" + description: + "The API key authentication method gives you access to only\ + \ the streams which are part of the Public API. When you want to get\ + \ streams available via the Consumer API (e.g. the private reviews)\ + \ you need to use authentication method OAuth 2.0." 
+ required: + - "client_id" + properties: + auth_type: + type: "string" + const: "apikey" + enum: + - "apikey" + client_id: + type: "string" + title: "API key" + description: "The API key of the Trustpilot API application." + airbyte_secret: true + business_units: + type: "array" + items: + type: "string" + title: "Business Unit names" + description: + "The names of business units which shall be synchronized. Some\ + \ streams e.g. configured_business_units or private_reviews use this configuration." + examples: + - "mydomain.com" + - "www.mydomain.com" + start_date: + type: "string" + title: "Start Date" + description: + "For streams with sync. method incremental the start date time\ + \ to be used" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "%Y-%m-%dT%H:%M:%SZ" + source-google-webfonts: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + order: 0 + x-speakeasy-param-sensitive: true + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + order: 1 + prettyPrint: + type: "string" + description: "Optional, boolean type" + order: 2 + sort: + type: "string" + description: "Optional, to find how to sort" + order: 3 + sourceType: + title: "google-webfonts" + const: "google-webfonts" + enum: + - "google-webfonts" + order: 0 + type: "string" + source-google-webfonts-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "API key is required to access google apis, For getting your's\ + \ goto google console and generate api key for Webfonts" + order: 0 + alt: + type: "string" + description: "Optional, Available params- json, media, proto" + order: 1 + prettyPrint: + type: 
"string" + description: "Optional, boolean type" + order: 2 + sort: + type: "string" + description: "Optional, to find how to sort" + order: 3 + source-pypi: + type: "object" + required: + - "project_name" + - "sourceType" + properties: + version: + type: "string" + title: "Package Version" + description: + "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + order: 1 + project_name: + type: "string" + title: "PyPI Package" + description: + "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + order: 0 + sourceType: + title: "pypi" + const: "pypi" + enum: + - "pypi" + order: 0 + type: "string" + source-pypi-update: + type: "object" + required: + - "project_name" + properties: + version: + type: "string" + title: "Package Version" + description: + "Version of the project/package. Use it to find a particular\ + \ release instead of all releases." + examples: + - "1.2.0" + order: 1 + project_name: + type: "string" + title: "PyPI Package" + description: + "Name of the project/package. Can only be in lowercase with\ + \ hyphen. This is the name used using pip command for installing the package." + examples: + - "sampleproject" + order: 0 + source-slack: + title: "Slack Spec" + type: "object" + required: + - "start_date" + - "lookback_window" + - "join_channels" + - "sourceType" + properties: + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + title: "Start Date" + format: "date-time" + lookback_window: + type: "integer" + title: "Threads Lookback window (Days)" + description: + "How far into the past to look for messages in threads, default\ + \ is 0 days" + examples: + - 7 + - 14 + minimum: 0 + default: 0 + maximum: 365 + join_channels: + type: "boolean" + default: true + title: "Join all channels" + description: + "Whether to join all channels or to sync data only from channels\ + \ the bot is already in. If false, you'll need to manually add the bot\ + \ to all the channels from which you'd like to sync messages. " + include_private_channels: + type: "boolean" + default: false + title: "Include private channels" + description: + "Whether to read information from private channels that the\ + \ bot is already in. If false, only public channels will be read. If\ + \ true, the bot must be manually added to private channels. " + channel_filter: + type: "array" + default: [] + items: + type: "string" + minLength: 0 + title: "Channel name filter" + description: + "A channel name list (without leading '#' char) which limit\ + \ the channels from which you'd like to sync. Empty list means no filter." + examples: + - "channel_one" + - "channel_two" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate into Slack" + type: "object" + oneOf: + - type: "object" + title: "Sign in via Slack (OAuth)" + required: + - "option_title" + - "client_id" + - "client_secret" + - "access_token" + properties: + option_title: + type: "string" + const: "Default OAuth2.0 authorization" + enum: + - "Default OAuth2.0 authorization" + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access token" + description: + "Slack access_token. See our docs if you need help generating the token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + - type: "object" + title: "API Token" + required: + - "option_title" + - "api_token" + properties: + option_title: + type: "string" + const: "API Token Credentials" + enum: + - "API Token Credentials" + api_token: + type: "string" + title: "API Token" + description: + "A Slack bot token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 1 + sourceType: + title: "slack" + const: "slack" + enum: + - "slack" + order: 0 + type: "string" + source-slack-update: + title: "Slack Spec" + type: "object" + required: + - "start_date" + - "lookback_window" + - "join_channels" + properties: + start_date: + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + title: "Start Date" + format: "date-time" + lookback_window: + type: "integer" + title: "Threads Lookback window (Days)" + description: + "How far into the past to look for messages in threads, default\ + \ is 0 days" + examples: + - 7 + - 14 + minimum: 0 + default: 0 + maximum: 365 + join_channels: + type: "boolean" + default: true + title: "Join all channels" + description: + "Whether to join all channels or to sync data only from channels\ + \ the bot is already in. If false, you'll need to manually add the bot\ + \ to all the channels from which you'd like to sync messages. " + include_private_channels: + type: "boolean" + default: false + title: "Include private channels" + description: + "Whether to read information from private channels that the\ + \ bot is already in. 
If false, only public channels will be read. If\ + \ true, the bot must be manually added to private channels. " + channel_filter: + type: "array" + default: [] + items: + type: "string" + minLength: 0 + title: "Channel name filter" + description: + "A channel name list (without leading '#' char) which limit\ + \ the channels from which you'd like to sync. Empty list means no filter." + examples: + - "channel_one" + - "channel_two" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate into Slack" + type: "object" + oneOf: + - type: "object" + title: "Sign in via Slack (OAuth)" + required: + - "option_title" + - "client_id" + - "client_secret" + - "access_token" + properties: + option_title: + type: "string" + const: "Default OAuth2.0 authorization" + enum: + - "Default OAuth2.0 authorization" + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." + airbyte_secret: true + access_token: + type: "string" + title: "Access token" + description: + "Slack access_token. See our docs if you need help generating the token." + airbyte_secret: true + order: 0 + - type: "object" + title: "API Token" + required: + - "option_title" + - "api_token" + properties: + option_title: + type: "string" + const: "API Token Credentials" + enum: + - "API Token Credentials" + api_token: + type: "string" + title: "API Token" + description: + "A Slack bot token. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + order: 1 + source-file: + title: "File Source Spec" + type: "object" + required: + - "dataset_name" + - "format" + - "url" + - "provider" + - "sourceType" + properties: + dataset_name: + type: "string" + title: "Dataset Name" + description: + "The Name of the final table to replicate this file into (should\ + \ include letters, numbers dash and underscores only)." + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "excel_binary" + - "fwf" + - "feather" + - "parquet" + - "yaml" + default: "csv" + title: "File Format" + description: + "The Format of the file which should be replicated (Warning:\ + \ some formats may be experimental, please refer to the docs)." + reader_options: + type: "string" + title: "Reader Options" + description: + "This should be a string in JSON format. It depends on the\ + \ chosen file format to provide additional options and tune its behavior." + examples: + - "{}" + - '{"sep": " "}' + - "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"\ + ] }" + url: + type: "string" + title: "URL" + description: "The URL path to access the file which should be replicated." + examples: + - "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" + - "gs://my-google-bucket/data.csv" + - "s3://gdelt-open-data/events/20190914.export.csv" + provider: + type: "object" + title: "Storage Provider" + description: + "The storage Provider or Location of the file(s) which should\ + \ be replicated." 
+ default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + const: "HTTPS" + enum: + - "HTTPS" + user_agent: + type: "boolean" + title: "User-Agent" + default: false + description: "Add User-Agent to request" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "GCS" + enum: + - "GCS" + service_account_json: + type: "string" + title: "Service Account JSON" + airbyte_secret: true + description: + "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + x-speakeasy-param-sensitive: true + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "S3" + enum: + - "S3" + aws_access_key_id: + type: "string" + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + title: "Storage" + const: "AzBlob" + enum: + - "AzBlob" + storage_account: + type: "string" + title: "Storage Account" + description: + "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + title: "SAS Token" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + x-speakeasy-param-sensitive: true + shared_key: + type: "string" + title: "Shared Key" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SSH" + enum: + - "SSH" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SCP" + enum: + - "SCP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SFTP" + enum: + - "SFTP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + description: + "WARNING: Note that the local storage URL available for\ + \ reading must start with the local mount \"/local/\" at the moment\ + \ until we implement more advanced docker mounting options." 
+ const: "local" + enum: + - "local" + sourceType: + title: "file" + const: "file" + enum: + - "file" + order: 0 + type: "string" + source-file-update: + title: "File Source Spec" + type: "object" + required: + - "dataset_name" + - "format" + - "url" + - "provider" + properties: + dataset_name: + type: "string" + title: "Dataset Name" + description: + "The Name of the final table to replicate this file into (should\ + \ include letters, numbers dash and underscores only)." + format: + type: "string" + enum: + - "csv" + - "json" + - "jsonl" + - "excel" + - "excel_binary" + - "fwf" + - "feather" + - "parquet" + - "yaml" + default: "csv" + title: "File Format" + description: + "The Format of the file which should be replicated (Warning:\ + \ some formats may be experimental, please refer to the docs)." + reader_options: + type: "string" + title: "Reader Options" + description: + "This should be a string in JSON format. It depends on the\ + \ chosen file format to provide additional options and tune its behavior." + examples: + - "{}" + - '{"sep": " "}' + - "{\"sep\": \"\t\", \"header\": 0, \"names\": [\"column1\", \"column2\"\ + ] }" + url: + type: "string" + title: "URL" + description: "The URL path to access the file which should be replicated." + examples: + - "https://storage.googleapis.com/covid19-open-data/v2/latest/epidemiology.csv" + - "gs://my-google-bucket/data.csv" + - "s3://gdelt-open-data/events/20190914.export.csv" + provider: + type: "object" + title: "Storage Provider" + description: + "The storage Provider or Location of the file(s) which should\ + \ be replicated." 
+ default: "Public Web" + oneOf: + - title: "HTTPS: Public Web" + required: + - "storage" + properties: + storage: + type: "string" + const: "HTTPS" + enum: + - "HTTPS" + user_agent: + type: "boolean" + title: "User-Agent" + default: false + description: "Add User-Agent to request" + - title: "GCS: Google Cloud Storage" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "GCS" + enum: + - "GCS" + service_account_json: + type: "string" + title: "Service Account JSON" + airbyte_secret: true + description: + "In order to access private Buckets stored on Google\ + \ Cloud, this connector would need a service account json credentials\ + \ with the proper permissions as described here. Please generate the credentials.json\ + \ file and copy/paste its content to this field (expecting JSON\ + \ formats). If accessing publicly available data, this field is\ + \ not necessary." + - title: "S3: Amazon Web Services" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + const: "S3" + enum: + - "S3" + aws_access_key_id: + type: "string" + title: "AWS Access Key ID" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." + aws_secret_access_key: + type: "string" + title: "AWS Secret Access Key" + description: + "In order to access private Buckets stored on AWS S3,\ + \ this connector would need credentials with the proper permissions.\ + \ If accessing publicly available data, this field is not necessary." 
+ airbyte_secret: true + - title: "AzBlob: Azure Blob Storage" + required: + - "storage" + - "storage_account" + properties: + storage: + type: "string" + title: "Storage" + const: "AzBlob" + enum: + - "AzBlob" + storage_account: + type: "string" + title: "Storage Account" + description: + "The globally unique name of the storage account that\ + \ the desired blob sits within. See here for more details." + sas_token: + type: "string" + title: "SAS Token" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a SAS (Shared\ + \ Access Signature) token. If accessing publicly available data,\ + \ this field is not necessary." + airbyte_secret: true + shared_key: + type: "string" + title: "Shared Key" + description: + "To access Azure Blob Storage, this connector would need\ + \ credentials with the proper permissions. One option is a storage\ + \ account shared key (aka account key or access key). If accessing\ + \ publicly available data, this field is not necessary." 
+ airbyte_secret: true + - title: "SSH: Secure Shell" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SSH" + enum: + - "SSH" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SCP: Secure copy protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SCP" + enum: + - "SCP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "SFTP: Secure File Transfer Protocol" + required: + - "storage" + - "user" + - "host" + properties: + storage: + type: "string" + title: "Storage" + const: "SFTP" + enum: + - "SFTP" + user: + type: "string" + title: "User" + description: "" + password: + type: "string" + title: "Password" + description: "" + airbyte_secret: true + host: + type: "string" + title: "Host" + description: "" + port: + type: "string" + title: "Port" + default: "22" + description: "" + - title: "Local Filesystem (limited)" + required: + - "storage" + properties: + storage: + type: "string" + title: "Storage" + description: + "WARNING: Note that the local storage URL available for\ + \ reading must start with the local mount \"/local/\" at the moment\ + \ until we implement more advanced docker mounting options." + const: "local" + enum: + - "local" + source-lokalise: + type: "object" + required: + - "api_key" + - "project_id" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Lokalise API Key with read-access. 
Available at Profile settings\ + \ > API tokens. See here." + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + project_id: + type: "string" + description: "Lokalise project ID. Available at Project Settings > General." + title: "Project Id" + order: 1 + sourceType: + title: "lokalise" + const: "lokalise" + enum: + - "lokalise" + order: 0 + type: "string" + source-lokalise-update: + type: "object" + required: + - "api_key" + - "project_id" + properties: + api_key: + type: "string" + description: + "Lokalise API Key with read-access. Available at Profile settings\ + \ > API tokens. See here." + title: "API Key" + airbyte_secret: true + order: 0 + project_id: + type: "string" + description: "Lokalise project ID. Available at Project Settings > General." + title: "Project Id" + order: 1 + source-zoho-crm: + title: "Zoho Crm Configuration" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "environment" + - "dc_region" + - "edition" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "OAuth2.0 Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "OAuth2.0 Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "OAuth2.0 Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + dc_region: + title: "Data Center Location" + type: "string" + description: + "Please choose the region of your Data Center location. 
More\ + \ info by this Link" + enum: + - "US" + - "AU" + - "EU" + - "IN" + - "CN" + - "JP" + environment: + title: "Environment" + type: "string" + description: "Please choose the environment" + enum: + - "Production" + - "Developer" + - "Sandbox" + start_datetime: + title: "Start Date" + type: + - "string" + - "null" + examples: + - "2000-01-01" + - "2000-01-01 13:00" + - "2000-01-01 13:00:00" + - "2000-01-01T13:00+00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`" + format: "date-time" + edition: + title: "Zoho CRM Edition" + type: "string" + description: + "Choose your Edition of Zoho CRM to determine API Concurrency\ + \ Limits" + enum: + - "Free" + - "Standard" + - "Professional" + - "Enterprise" + - "Ultimate" + default: "Free" + sourceType: + title: "zoho-crm" + const: "zoho-crm" + enum: + - "zoho-crm" + order: 0 + type: "string" + source-zoho-crm-update: + title: "Zoho Crm Configuration" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "environment" + - "dc_region" + - "edition" + properties: + client_id: + type: "string" + title: "Client ID" + description: "OAuth2.0 Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "OAuth2.0 Client Secret" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "OAuth2.0 Refresh Token" + airbyte_secret: true + dc_region: + title: "Data Center Location" + type: "string" + description: + "Please choose the region of your Data Center location. 
+ More\ + \ info by this Link" + enum: + - "US" + - "AU" + - "EU" + - "IN" + - "CN" + - "JP" + environment: + title: "Environment" + type: "string" + description: "Please choose the environment" + enum: + - "Production" + - "Developer" + - "Sandbox" + start_datetime: + title: "Start Date" + type: + - "string" + - "null" + examples: + - "2000-01-01" + - "2000-01-01 13:00" + - "2000-01-01 13:00:00" + - "2000-01-01T13:00+00:00" + - "2000-01-01T13:00:00-07:00" + description: "ISO 8601, for instance: `YYYY-MM-DD`, `YYYY-MM-DD HH:MM:SS+HH:MM`" + format: "date-time" + edition: + title: "Zoho CRM Edition" + type: "string" + description: + "Choose your Edition of Zoho CRM to determine API Concurrency\ + \ Limits" + enum: + - "Free" + - "Standard" + - "Professional" + - "Enterprise" + - "Ultimate" + default: "Free" + source-gainsight-px: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "The Aptrinsic API Key which is received from the dashboard\ + \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "gainsight-px" + const: "gainsight-px" + enum: + - "gainsight-px" + order: 0 + type: "string" + source-gainsight-px-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "The Aptrinsic API Key which is received from the dashboard\ + \ settings (ref - https://app.aptrinsic.com/settings/api-keys)" + order: 0 + source-stripe: + title: "Stripe Source Spec" + type: "object" + required: + - "client_secret" + - "account_id" + - "sourceType" + properties: + account_id: + type: "string" + title: "Account ID" + description: + "Your Stripe account ID (starts with 'acct_', find yours here)." 
+ order: 0 + client_secret: + type: "string" + title: "Secret Key" + description: + "Stripe API key (usually starts with 'sk_live_'; find yours\ + \ here)." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ + \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window in days" + default: 0 + minimum: 0 + description: + "When set, the connector will always re-export data from the\ + \ past N days, where N is the value set here. This is useful if your data\ + \ is frequently updated after creation. The Lookback Window only applies\ + \ to streams that do not support event-based incremental syncs: Events,\ + \ SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks,\ + \ Refunds. More info here" + order: 3 + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 365 + minimum: 1 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + description: + "The time increment used by the connector when requesting data\ + \ from the Stripe API. The bigger the value is, the less requests will\ + \ be made and faster the sync will be. On the other hand, the more seldom\ + \ the state is persisted." + order: 4 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 20 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker thread to use for the sync. The performance\ + \ upper boundary depends on call_rate_limit setting and type of account." 
+ order: 5 + call_rate_limit: + type: "integer" + title: "Max number of API calls per second" + examples: + - 25 + - 100 + description: + "The number of API calls per second that you allow connector\ + \ to make. This value can not be bigger than real API call rate limit\ + \ (https://stripe.com/docs/rate-limits). If not specified the default\ + \ maximum is 25 and 100 calls per second for test and production tokens\ + \ respectively." + sourceType: + title: "stripe" + const: "stripe" + enum: + - "stripe" + order: 0 + type: "string" + source-stripe-update: + title: "Stripe Source Spec" + type: "object" + required: + - "client_secret" + - "account_id" + properties: + account_id: + type: "string" + title: "Account ID" + description: + "Your Stripe account ID (starts with 'acct_', find yours here)." + order: 0 + client_secret: + type: "string" + title: "Secret Key" + description: + "Stripe API key (usually starts with 'sk_live_'; find yours\ + \ here)." + airbyte_secret: true + order: 1 + start_date: + type: "string" + title: "Replication start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ + \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + order: 2 + lookback_window_days: + type: "integer" + title: "Lookback Window in days" + default: 0 + minimum: 0 + description: + "When set, the connector will always re-export data from the\ + \ past N days, where N is the value set here. This is useful if your data\ + \ is frequently updated after creation. The Lookback Window only applies\ + \ to streams that do not support event-based incremental syncs: Events,\ + \ SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks,\ + \ Refunds. 
More info here" + order: 3 + slice_range: + type: "integer" + title: "Data request time increment in days" + default: 365 + minimum: 1 + examples: + - 1 + - 3 + - 10 + - 30 + - 180 + - 360 + description: + "The time increment used by the connector when requesting data\ + \ from the Stripe API. The bigger the value is, the less requests will\ + \ be made and faster the sync will be. On the other hand, the more seldom\ + \ the state is persisted." + order: 4 + num_workers: + type: "integer" + title: "Number of concurrent workers" + minimum: 1 + maximum: 20 + default: 10 + examples: + - 1 + - 2 + - 3 + description: + "The number of worker thread to use for the sync. The performance\ + \ upper boundary depends on call_rate_limit setting and type of account." + order: 5 + call_rate_limit: + type: "integer" + title: "Max number of API calls per second" + examples: + - 25 + - 100 + description: + "The number of API calls per second that you allow connector\ + \ to make. This value can not be bigger than real API call rate limit\ + \ (https://stripe.com/docs/rate-limits). If not specified the default\ + \ maximum is 25 and 100 calls per second for test and production tokens\ + \ respectively." 
+ source-buzzsprout: + type: "object" + required: + - "api_key" + - "podcast_id" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + podcast_id: + type: "string" + description: "Podcast ID found in `https://www.buzzsprout.com/my/profile/api`" + title: "Podcast ID" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "buzzsprout" + const: "buzzsprout" + enum: + - "buzzsprout" + order: 0 + type: "string" + source-buzzsprout-update: + type: "object" + required: + - "api_key" + - "podcast_id" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + podcast_id: + type: "string" + description: "Podcast ID found in `https://www.buzzsprout.com/my/profile/api`" + title: "Podcast ID" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-youtube-analytics: + title: "YouTube Analytics Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: 
true + x-speakeasy-param-sensitive: true + sourceType: + title: "youtube-analytics" + const: "youtube-analytics" + enum: + - "youtube-analytics" + order: 0 + type: "string" + source-youtube-analytics-update: + title: "YouTube Analytics Spec" + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authenticate via OAuth 2.0" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + additionalProperties: true + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID and\ + \ secret" + airbyte_secret: true + source-google-sheets: + title: "Google Sheets Source Spec" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + - "sourceType" + properties: + batch_size: + type: "integer" + title: "Row Batch Size" + description: + "Default value is 200. An integer representing row batch size\ + \ for each sent request to Google Sheets API. Row batch size means how\ + \ many rows are processed from the google sheet, for example default value\ + \ 200 would process rows 1-201, then 201-401 and so on. Based on Google\ + \ Sheets API limits documentation, it is possible to send up to 300\ + \ requests per minute, but each individual request has to be processed\ + \ under 180 seconds, otherwise the request returns a timeout error. In\ + \ regards to this information, consider network speed and number of columns\ + \ of the google sheet when deciding a batch_size value. Default value\ + \ should cover most of the cases, but if a google sheet has over 100,000\ + \ records or more, consider increasing batch_size value." 
+ default: 200 + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "Enter the link to the Google spreadsheet you want to sync.\ + \ To copy the link, click the 'Share' button in the top-right corner of\ + \ the spreadsheet, then click 'Copy link'." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" + names_conversion: + type: "boolean" + title: "Convert Column Names to SQL-Compliant Format" + description: + "Enables the conversion of column names to a standardized,\ + \ SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this\ + \ option if your destination is SQL-based." + default: false + credentials: + type: "object" + title: "Authentication" + description: "Credentials for connecting to the Google Sheets API" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Enter your Google application's refresh token. See Google's\ + \ documentation for more information." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + service_account_info: + type: "string" + title: "Service Account Information." + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + x-speakeasy-param-sensitive: true + sourceType: + title: "google-sheets" + const: "google-sheets" + enum: + - "google-sheets" + order: 0 + type: "string" + source-google-sheets-update: + title: "Google Sheets Source Spec" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + properties: + batch_size: + type: "integer" + title: "Row Batch Size" + description: + "Default value is 200. An integer representing row batch size\ + \ for each sent request to Google Sheets API. Row batch size means how\ + \ many rows are processed from the google sheet, for example default value\ + \ 200 would process rows 1-201, then 201-401 and so on. Based on Google\ + \ Sheets API limits documentation, it is possible to send up to 300\ + \ requests per minute, but each individual request has to be processed\ + \ under 180 seconds, otherwise the request returns a timeout error. In\ + \ regards to this information, consider network speed and number of columns\ + \ of the google sheet when deciding a batch_size value. Default value\ + \ should cover most of the cases, but if a google sheet has over 100,000\ + \ records or more, consider increasing batch_size value." 
+ default: 200 + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "Enter the link to the Google spreadsheet you want to sync.\ + \ To copy the link, click the 'Share' button in the top-right corner of\ + \ the spreadsheet, then click 'Copy link'." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" + names_conversion: + type: "boolean" + title: "Convert Column Names to SQL-Compliant Format" + description: + "Enables the conversion of column names to a standardized,\ + \ SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this\ + \ option if your destination is SQL-based." + default: false + credentials: + type: "object" + title: "Authentication" + description: "Credentials for connecting to the Google Sheets API" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "Enter your Google application's refresh token. See Google's\ + \ documentation for more information." + airbyte_secret: true + - title: "Service Account Key Authentication" + type: "object" + required: + - "auth_type" + - "service_account_info" + properties: + auth_type: + type: "string" + const: "Service" + enum: + - "Service" + service_account_info: + type: "string" + title: "Service Account Information." 
+ description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... }" + source-zendesk-talk: + type: "object" + title: "Source Zendesk Talk Spec" + required: + - "start_date" + - "subdomain" + - "sourceType" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: + "This is your Zendesk subdomain that can be found in your account\ + \ URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN\ + \ is the value of your subdomain." + credentials: + title: "Authentication" + type: "object" + order: 1 + description: + "Zendesk service provides two authentication methods. Choose\ + \ between: `OAuth2.0` or `API token`." + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the API token generated. See the docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "api_token" + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. 
See the docs\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated." + examples: + - "2020-10-15T00:00:00Z" + sourceType: + title: "zendesk-talk" + const: "zendesk-talk" + enum: + - "zendesk-talk" + order: 0 + type: "string" + source-zendesk-talk-update: + type: "object" + title: "Source Zendesk Talk Spec" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: + "This is your Zendesk subdomain that can be found in your account\ + \ URL. For example, in https://{MY_SUBDOMAIN}.zendesk.com/, where MY_SUBDOMAIN\ + \ is the value of your subdomain." + credentials: + title: "Authentication" + type: "object" + order: 1 + description: + "Zendesk service provides two authentication methods. Choose\ + \ between: `OAuth2.0` or `API token`." + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the API token generated. See the docs\ + \ for more information." 
+ airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "api_token" + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See the docs\ + \ for more information." + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Zendesk\ + \ Talk API, in the format YYYY-MM-DDT00:00:00Z. All data generated after\ + \ this date will be replicated." + examples: + - "2020-10-15T00:00:00Z" + source-freshdesk: + type: "object" + required: + - "api_key" + - "domain" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshdesk API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain: + type: "string" + order: 2 + title: "Domain" + description: "Freshdesk domain" + examples: + - "myaccount.freshdesk.com" + pattern: "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" + requests_per_minute: + type: "integer" + order: 3 + title: "Requests per minute" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + type: "string" + order: 4 + description: + "UTC date and time. 
Any data created after this date will be\ + \ replicated. If this parameter is not set, all data will be replicated." + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-12-01T00:00:00Z" + lookback_window_in_days: + type: "integer" + order: 5 + title: "Lookback Window" + default: 14 + description: + "Number of days for lookback window for the stream Satisfaction\ + \ Ratings" + sourceType: + title: "freshdesk" + const: "freshdesk" + enum: + - "freshdesk" + order: 0 + type: "string" + source-freshdesk-update: + type: "object" + required: + - "api_key" + - "domain" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshdesk API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + domain: + type: "string" + order: 2 + title: "Domain" + description: "Freshdesk domain" + examples: + - "myaccount.freshdesk.com" + pattern: "^[a-zA-Z0-9._-]*\\.freshdesk\\.com$" + requests_per_minute: + type: "integer" + order: 3 + title: "Requests per minute" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + type: "string" + order: 4 + description: + "UTC date and time. Any data created after this date will be\ + \ replicated. If this parameter is not set, all data will be replicated." 
+ format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2020-12-01T00:00:00Z" + lookback_window_in_days: + type: "integer" + order: 5 + title: "Lookback Window" + default: 14 + description: + "Number of days for lookback window for the stream Satisfaction\ + \ Ratings" + source-asana: + title: "Asana Spec" + type: "object" + properties: + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Asana" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Asana (Oauth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "OAuth Credentials" + const: "OAuth Credentials" + enum: + - "OAuth Credentials" + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "" + description: "" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "PAT Credentials" + const: "PAT Credentials" + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Token" + description: + "Asana Personal Access Token (generate yours here)." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_export_ids: + title: "Organization Export IDs" + description: "Globally unique identifiers for the organization exports" + type: "array" + sourceType: + title: "asana" + const: "asana" + enum: + - "asana" + order: 0 + type: "string" + source-asana-update: + title: "Asana Spec" + type: "object" + properties: + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Asana" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Asana (Oauth)" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "OAuth Credentials" + const: "OAuth Credentials" + enum: + - "OAuth Credentials" + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + refresh_token: + type: "string" + title: "" + description: "" + airbyte_secret: true + - type: "object" + title: "Authenticate with Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + title: "Credentials title" + description: "PAT Credentials" + const: "PAT Credentials" + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Token" + description: + "Asana Personal Access Token (generate yours here)." + airbyte_secret: true + organization_export_ids: + title: "Organization Export IDs" + description: "Globally unique identifiers for the organization exports" + type: "array" + source-posthog: + title: "PostHog Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + start_date: + title: "Start Date" + type: "string" + description: + "The date from which you'd like to replicate the data. Any\ + \ data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + api_key: + type: "string" + airbyte_secret: true + title: "API Key" + description: + "API Key. See the docs for information on how to generate this key." + x-speakeasy-param-sensitive: true + base_url: + type: "string" + default: "https://app.posthog.com" + title: "Base URL" + description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." + examples: + - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of events\ + \ stream." + examples: + - 30 + - 10 + - 5 + sourceType: + title: "posthog" + const: "posthog" + enum: + - "posthog" + order: 0 + type: "string" + source-posthog-update: + title: "PostHog Spec" + type: "object" + required: + - "api_key" + - "start_date" + properties: + start_date: + title: "Start Date" + type: "string" + description: + "The date from which you'd like to replicate the data. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + api_key: + type: "string" + airbyte_secret: true + title: "API Key" + description: + "API Key. See the docs for information on how to generate this key." + base_url: + type: "string" + default: "https://app.posthog.com" + title: "Base URL" + description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." + examples: + - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of events\ + \ stream." 
+ examples: + - 30 + - 10 + - 5 + source-split-io: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "split-io" + const: "split-io" + enum: + - "split-io" + order: 0 + type: "string" + source-split-io-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-getlago: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_url: + type: "string" + description: "Your Lago API URL" + title: "API Url" + default: "https://api.getlago.com/api/v1" + order: 0 + api_key: + type: "string" + description: + "Your API Key. See here." + title: "API Key" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "getlago" + const: "getlago" + enum: + - "getlago" + order: 0 + type: "string" + source-getlago-update: + type: "object" + required: + - "api_key" + properties: + api_url: + type: "string" + description: "Your Lago API URL" + title: "API Url" + default: "https://api.getlago.com/api/v1" + order: 0 + api_key: + type: "string" + description: + "Your API Key. See here." 
+ title: "API Key" + airbyte_secret: true + order: 1 + source-gridly: + title: "Gridly Spec" + type: "object" + required: + - "api_key" + - "grid_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + grid_id: + type: "string" + title: "Grid ID" + description: "ID of a grid, or can be ID of a branch" + sourceType: + title: "gridly" + const: "gridly" + enum: + - "gridly" + order: 0 + type: "string" + source-gridly-update: + title: "Gridly Spec" + type: "object" + required: + - "api_key" + - "grid_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + grid_id: + type: "string" + title: "Grid ID" + description: "ID of a grid, or can be ID of a branch" + source-microsoft-teams: + title: "Microsoft Teams Spec" + type: "object" + required: + - "period" + - "sourceType" + properties: + period: + type: "string" + title: "Period" + description: + "Specifies the length of time over which the Team Device Report\ + \ stream is aggregated. The supported values are: D7, D30, D90, and D180." + examples: + - "D7" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Microsoft" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Microsoft (OAuth 2.0)" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. 
Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A Refresh Token to renew the expired Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate via Microsoft" + required: + - "tenant_id" + - "client_id" + - "client_secret" + properties: + auth_type: + type: "string" + const: "Token" + enum: + - "Token" + default: "Token" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "microsoft-teams" + const: "microsoft-teams" + enum: + - "microsoft-teams" + order: 0 + type: "string" + source-microsoft-teams-update: + title: "Microsoft Teams Spec" + type: "object" + required: + - "period" + properties: + period: + type: "string" + title: "Period" + description: + "Specifies the length of time over which the Team Device Report\ + \ stream is aggregated. The supported values are: D7, D30, D90, and D180." + examples: + - "D7" + credentials: + title: "Authentication mechanism" + description: "Choose how to authenticate to Microsoft" + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Microsoft (OAuth 2.0)" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + default: "Client" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "A Refresh Token to renew the expired Access Token." 
+ airbyte_secret: true + - type: "object" + title: "Authenticate via Microsoft" + required: + - "tenant_id" + - "client_id" + - "client_secret" + properties: + auth_type: + type: "string" + const: "Token" + enum: + - "Token" + default: "Token" + order: 0 + tenant_id: + title: "Directory (tenant) ID" + type: "string" + description: + "A globally unique identifier (GUID) that is different\ + \ than your organization name or domain. Follow these steps to obtain:\ + \ open one of the Teams where you belong inside the Teams Application\ + \ -> Click on the … next to the Team title -> Click on Get link\ + \ to team -> Copy the link to the team and grab the tenant ID form\ + \ the URL" + airbyte_secret: true + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Microsoft Teams developer\ + \ application." + airbyte_secret: true + source-looker: + type: "object" + required: + - "client_id" + - "client_secret" + - "domain" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + description: + "The Client ID is first part of an API3 key that is specific\ + \ to each Looker user. See the docs for more information on how to generate this key." + client_secret: + type: "string" + order: 1 + title: "Client Secret" + description: "The Client Secret is second part of an API3 key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain: + type: "string" + order: 2 + title: "Domain" + examples: + - "domainname.looker.com" + - "looker.clientname.com" + - "123.123.124.123:8000" + description: + "Domain for your Looker account, e.g. 
airbyte.cloud.looker.com,looker.[clientname].com,IP\ + \ address" + run_look_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + order: 3 + title: "Look IDs to Run" + description: "The IDs of any Looks to run" + sourceType: + title: "looker" + const: "looker" + enum: + - "looker" + order: 0 + type: "string" + source-looker-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "domain" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + description: + "The Client ID is first part of an API3 key that is specific\ + \ to each Looker user. See the docs for more information on how to generate this key." + client_secret: + type: "string" + order: 1 + title: "Client Secret" + description: "The Client Secret is second part of an API3 key." + airbyte_secret: true + domain: + type: "string" + order: 2 + title: "Domain" + examples: + - "domainname.looker.com" + - "looker.clientname.com" + - "123.123.124.123:8000" + description: + "Domain for your Looker account, e.g. airbyte.cloud.looker.com,looker.[clientname].com,IP\ + \ address" + run_look_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + order: 3 + title: "Look IDs to Run" + description: "The IDs of any Looks to run" + source-dropbox-sign: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: "API key to use. 
Find it at https://app.hellosign.com/home/myAccount#api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "dropbox-sign" + const: "dropbox-sign" + enum: + - "dropbox-sign" + order: 0 + type: "string" + source-dropbox-sign-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: "API key to use. Find it at https://app.hellosign.com/home/myAccount#api" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-google-tasks: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + records_limit: + type: "string" + description: "The maximum number of records to be returned per request" + order: 0 + title: "Records Limit" + default: "50" + sourceType: + title: "google-tasks" + const: "google-tasks" + enum: + - "google-tasks" + order: 0 + type: "string" + source-google-tasks-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + records_limit: + type: "string" + description: "The maximum number of records to be returned per request" + 
order: 0 + title: "Records Limit" + default: "50" + source-amazon-seller-partner: + title: "Amazon Seller Partner Spec" + type: "object" + required: + - "aws_environment" + - "region" + - "account_type" + - "lwa_app_id" + - "lwa_client_secret" + - "refresh_token" + - "sourceType" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + aws_environment: + title: "AWS Environment" + description: "Select the AWS Environment." + enum: + - "PRODUCTION" + - "SANDBOX" + default: "PRODUCTION" + type: "string" + order: 1 + region: + title: "AWS Region" + description: "Select the AWS Region." + enum: + - "AE" + - "AU" + - "BE" + - "BR" + - "CA" + - "DE" + - "EG" + - "ES" + - "FR" + - "GB" + - "IN" + - "IT" + - "JP" + - "MX" + - "NL" + - "PL" + - "SA" + - "SE" + - "SG" + - "TR" + - "UK" + - "US" + default: "US" + type: "string" + order: 2 + account_type: + title: "AWS Seller Partner Account Type" + description: + "Type of the Account you're going to authorize the Airbyte\ + \ application by" + enum: + - "Seller" + - "Vendor" + default: "Seller" + type: "string" + order: 3 + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "The Refresh Token obtained via OAuth flow authorization." + airbyte_secret: true + order: 6 + type: "string" + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
If start date is not provided\ + \ or older than 2 years ago from today, the date 2 years ago from today\ + \ will be used." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + order: 7 + type: "string" + format: "date-time" + replication_end_date: + title: "End Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data after this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$" + examples: + - "2017-01-25T00:00:00Z" + order: 8 + type: "string" + format: "date-time" + period_in_days: + title: "Period In Days" + type: "integer" + description: + "For syncs spanning a large date range, this option is used\ + \ to request data in a smaller fixed window to improve sync reliability.\ + \ This time window can be configured granularly by day." + default: 90 + minimum: 1 + order: 9 + report_options_list: + title: "Report Options" + description: + "Additional information passed to reports. This varies by report\ + \ type." 
+ order: 10 + type: "array" + items: + type: "object" + title: "Report Options" + required: + - "report_name" + - "stream_name" + - "options_list" + properties: + report_name: + title: "Report Name" + type: "string" + order: 0 + enum: + - "GET_AFN_INVENTORY_DATA" + - "GET_AFN_INVENTORY_DATA_BY_COUNTRY" + - "GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL" + - "GET_FBA_ESTIMATED_FBA_FEES_TXT_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_PROMOTION_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_REPLACEMENT_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_ORDER_DETAIL_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_SHIPMENT_DETAIL_DATA" + - "GET_FBA_INVENTORY_PLANNING_DATA" + - "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA" + - "GET_FBA_REIMBURSEMENTS_DATA" + - "GET_FBA_SNS_FORECAST_DATA" + - "GET_FBA_SNS_PERFORMANCE_DATA" + - "GET_FBA_STORAGE_FEE_CHARGES_DATA" + - "GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_FLAT_FILE_ARCHIVED_ORDERS_DATA_BY_ORDER_DATE" + - "GET_FLAT_FILE_OPEN_LISTINGS_DATA" + - "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE" + - "GET_LEDGER_DETAIL_VIEW_DATA" + - "GET_LEDGER_SUMMARY_VIEW_DATA" + - "GET_MERCHANT_CANCELLED_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_ALL_DATA" + - "GET_MERCHANT_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT" + - "GET_MERCHANT_LISTINGS_INACTIVE_DATA" + - "GET_MERCHANTS_LISTINGS_FYP_REPORT" + - "GET_ORDER_REPORT_DATA_SHIPPING" + - "GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT" + - "GET_SELLER_FEEDBACK_DATA" + - "GET_STRANDED_INVENTORY_UI_DATA" + - "GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE" + - "GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_XML_BROWSE_TREE_DATA" + - "GET_VENDOR_REAL_TIME_INVENTORY_REPORT" + - "GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT" + - "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + - "GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT" + - 
"GET_SALES_AND_TRAFFIC_REPORT" + - "GET_VENDOR_SALES_REPORT" + - "GET_VENDOR_INVENTORY_REPORT" + - "GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT" + - "GET_VENDOR_TRAFFIC_REPORT" + stream_name: + title: "Stream Name" + type: "string" + order: 1 + options_list: + title: "List of options" + description: "List of options" + type: "array" + order: 2 + items: + type: "object" + required: + - "option_name" + - "option_value" + properties: + option_name: + title: "Name" + type: "string" + order: 0 + option_value: + title: "Value" + type: "string" + order: 1 + wait_to_avoid_fatal_errors: + title: "Wait between requests to avoid fatal statuses in reports" + type: "boolean" + description: + "For report based streams with known amount of requests per\ + \ time period, this option will use waiting time between requests to avoid\ + \ fatal statuses in reports. See Troubleshooting section for more details" + default: false + order: 11 + sourceType: + title: "amazon-seller-partner" + const: "amazon-seller-partner" + enum: + - "amazon-seller-partner" + order: 0 + type: "string" + source-amazon-seller-partner-update: + title: "Amazon Seller Partner Spec" + type: "object" + required: + - "aws_environment" + - "region" + - "account_type" + - "lwa_app_id" + - "lwa_client_secret" + - "refresh_token" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + aws_environment: + title: "AWS Environment" + description: "Select the AWS Environment." + enum: + - "PRODUCTION" + - "SANDBOX" + default: "PRODUCTION" + type: "string" + order: 1 + region: + title: "AWS Region" + description: "Select the AWS Region." 
+ enum: + - "AE" + - "AU" + - "BE" + - "BR" + - "CA" + - "DE" + - "EG" + - "ES" + - "FR" + - "GB" + - "IN" + - "IT" + - "JP" + - "MX" + - "NL" + - "PL" + - "SA" + - "SE" + - "SG" + - "TR" + - "UK" + - "US" + default: "US" + type: "string" + order: 2 + account_type: + title: "AWS Seller Partner Account Type" + description: + "Type of the Account you're going to authorize the Airbyte\ + \ application by" + enum: + - "Seller" + - "Vendor" + default: "Seller" + type: "string" + order: 3 + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + refresh_token: + title: "Refresh Token" + description: "The Refresh Token obtained via OAuth flow authorization." + airbyte_secret: true + order: 6 + type: "string" + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If start date is not provided\ + \ or older than 2 years ago from today, the date 2 years ago from today\ + \ will be used." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + order: 7 + type: "string" + format: "date-time" + replication_end_date: + title: "End Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data after this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$|^$" + examples: + - "2017-01-25T00:00:00Z" + order: 8 + type: "string" + format: "date-time" + period_in_days: + title: "Period In Days" + type: "integer" + description: + "For syncs spanning a large date range, this option is used\ + \ to request data in a smaller fixed window to improve sync reliability.\ + \ This time window can be configured granularly by day." + default: 90 + minimum: 1 + order: 9 + report_options_list: + title: "Report Options" + description: + "Additional information passed to reports. This varies by report\ + \ type." + order: 10 + type: "array" + items: + type: "object" + title: "Report Options" + required: + - "report_name" + - "stream_name" + - "options_list" + properties: + report_name: + title: "Report Name" + type: "string" + order: 0 + enum: + - "GET_AFN_INVENTORY_DATA" + - "GET_AFN_INVENTORY_DATA_BY_COUNTRY" + - "GET_AMAZON_FULFILLED_SHIPMENTS_DATA_GENERAL" + - "GET_FBA_ESTIMATED_FBA_FEES_TXT_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_RETURNS_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_PROMOTION_DATA" + - "GET_FBA_FULFILLMENT_CUSTOMER_SHIPMENT_REPLACEMENT_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_ORDER_DETAIL_DATA" + - "GET_FBA_FULFILLMENT_REMOVAL_SHIPMENT_DETAIL_DATA" + - "GET_FBA_INVENTORY_PLANNING_DATA" + - "GET_FBA_MYI_UNSUPPRESSED_INVENTORY_DATA" + - "GET_FBA_REIMBURSEMENTS_DATA" + - "GET_FBA_SNS_FORECAST_DATA" + - "GET_FBA_SNS_PERFORMANCE_DATA" + - "GET_FBA_STORAGE_FEE_CHARGES_DATA" + - "GET_FLAT_FILE_ACTIONABLE_ORDER_DATA_SHIPPING" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_LAST_UPDATE_GENERAL" + - "GET_FLAT_FILE_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_FLAT_FILE_ARCHIVED_ORDERS_DATA_BY_ORDER_DATE" + - "GET_FLAT_FILE_OPEN_LISTINGS_DATA" + - "GET_FLAT_FILE_RETURNS_DATA_BY_RETURN_DATE" + - "GET_LEDGER_DETAIL_VIEW_DATA" + - "GET_LEDGER_SUMMARY_VIEW_DATA" + - "GET_MERCHANT_CANCELLED_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_ALL_DATA" + - 
"GET_MERCHANT_LISTINGS_DATA" + - "GET_MERCHANT_LISTINGS_DATA_BACK_COMPAT" + - "GET_MERCHANT_LISTINGS_INACTIVE_DATA" + - "GET_MERCHANTS_LISTINGS_FYP_REPORT" + - "GET_ORDER_REPORT_DATA_SHIPPING" + - "GET_RESTOCK_INVENTORY_RECOMMENDATIONS_REPORT" + - "GET_SELLER_FEEDBACK_DATA" + - "GET_STRANDED_INVENTORY_UI_DATA" + - "GET_V2_SETTLEMENT_REPORT_DATA_FLAT_FILE" + - "GET_XML_ALL_ORDERS_DATA_BY_ORDER_DATE_GENERAL" + - "GET_XML_BROWSE_TREE_DATA" + - "GET_VENDOR_REAL_TIME_INVENTORY_REPORT" + - "GET_BRAND_ANALYTICS_MARKET_BASKET_REPORT" + - "GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT" + - "GET_BRAND_ANALYTICS_REPEAT_PURCHASE_REPORT" + - "GET_SALES_AND_TRAFFIC_REPORT" + - "GET_VENDOR_SALES_REPORT" + - "GET_VENDOR_INVENTORY_REPORT" + - "GET_VENDOR_NET_PURE_PRODUCT_MARGIN_REPORT" + - "GET_VENDOR_TRAFFIC_REPORT" + stream_name: + title: "Stream Name" + type: "string" + order: 1 + options_list: + title: "List of options" + description: "List of options" + type: "array" + order: 2 + items: + type: "object" + required: + - "option_name" + - "option_value" + properties: + option_name: + title: "Name" + type: "string" + order: 0 + option_value: + title: "Value" + type: "string" + order: 1 + wait_to_avoid_fatal_errors: + title: "Wait between requests to avoid fatal statuses in reports" + type: "boolean" + description: + "For report based streams with known amount of requests per\ + \ time period, this option will use waiting time between requests to avoid\ + \ fatal statuses in reports. 
See Troubleshooting section for more details" + default: false + order: 11 + source-northpass-lms: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "northpass-lms" + const: "northpass-lms" + enum: + - "northpass-lms" + order: 0 + type: "string" + source-northpass-lms-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + source-recreation: + type: "object" + required: + - "apikey" + - "sourceType" + properties: + apikey: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + query_campsites: + type: "string" + title: "Query Campsite" + order: 1 + sourceType: + title: "recreation" + const: "recreation" + enum: + - "recreation" + order: 0 + type: "string" + source-recreation-update: + type: "object" + required: + - "apikey" + properties: + apikey: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + query_campsites: + type: "string" + title: "Query Campsite" + order: 1 + source-breezy-hr: + type: "object" + required: + - "api_key" + - "company_id" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + company_id: + type: "string" + order: 1 + title: "Company ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "breezy-hr" + const: "breezy-hr" + enum: + - "breezy-hr" + order: 0 + type: "string" + source-breezy-hr-update: + type: "object" + required: + - "api_key" + - "company_id" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + company_id: + type: "string" + order: 1 + title: "Company ID" + airbyte_secret: true + 
source-linkedin-ads: + title: "Linkedin Ads Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Access Token" + type: "object" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." + examples: + - "2021-05-17" + format: "date" + lookback_window: + type: "integer" + title: "Lookback Window" + default: 0 + minimum: 0 + description: "How far into the past to look for records. 
(in days)" + account_ids: + title: "Account IDs" + type: "array" + description: + "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." + items: + type: "integer" + examples: + - 123456789 + default: [] + ad_analytics_reports: + title: "Custom Ad Analytics Reports" + type: "array" + items: + type: "object" + title: "Ad Analytics Report Configuration" + description: "Config for custom ad Analytics Report" + required: + - "name" + - "pivot_by" + - "time_granularity" + properties: + name: + title: "Report Name" + description: "The name for the custom report." + type: "string" + pivot_by: + title: "Pivot Category" + description: + "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." + type: "string" + enum: + - "COMPANY" + - "ACCOUNT" + - "SHARE" + - "CAMPAIGN" + - "CREATIVE" + - "CAMPAIGN_GROUP" + - "CONVERSION" + - "CONVERSATION_NODE" + - "CONVERSATION_NODE_OPTION_INDEX" + - "SERVING_LOCATION" + - "CARD_INDEX" + - "MEMBER_COMPANY_SIZE" + - "MEMBER_INDUSTRY" + - "MEMBER_SENIORITY" + - "MEMBER_JOB_TITLE" + - "MEMBER_JOB_FUNCTION" + - "MEMBER_COUNTRY_V2" + - "MEMBER_REGION_V2" + - "MEMBER_COMPANY" + - "PLACEMENT_NAME" + - "IMPRESSION_DEVICE_TYPE" + time_granularity: + title: "Time Granularity" + description: + "Choose how to group the data in your report by time.\ + \ The options are:
    - 'ALL': A single result summarizing the entire\ + \ time range.
    - 'DAILY': Group results by each day.
    - 'MONTHLY':\ + \ Group results by each month.
    - 'YEARLY': Group results by each\ + \ year.
    Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." + type: "string" + enum: + - "ALL" + - "DAILY" + - "MONTHLY" + - "YEARLY" + default: [] + sourceType: + title: "linkedin-ads" + const: "linkedin-ads" + enum: + - "linkedin-ads" + order: 0 + type: "string" + source-linkedin-ads-update: + title: "Linkedin Ads Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oAuth2.0" + enum: + - "oAuth2.0" + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + - title: "Access Token" + type: "object" + required: + - "access_token" + properties: + auth_method: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Access Token" + description: + "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated." 
+ examples: + - "2021-05-17" + format: "date" + lookback_window: + type: "integer" + title: "Lookback Window" + default: 0 + minimum: 0 + description: "How far into the past to look for records. (in days)" + account_ids: + title: "Account IDs" + type: "array" + description: + "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." + items: + type: "integer" + examples: + - 123456789 + default: [] + ad_analytics_reports: + title: "Custom Ad Analytics Reports" + type: "array" + items: + type: "object" + title: "Ad Analytics Report Configuration" + description: "Config for custom ad Analytics Report" + required: + - "name" + - "pivot_by" + - "time_granularity" + properties: + name: + title: "Report Name" + description: "The name for the custom report." + type: "string" + pivot_by: + title: "Pivot Category" + description: + "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." + type: "string" + enum: + - "COMPANY" + - "ACCOUNT" + - "SHARE" + - "CAMPAIGN" + - "CREATIVE" + - "CAMPAIGN_GROUP" + - "CONVERSION" + - "CONVERSATION_NODE" + - "CONVERSATION_NODE_OPTION_INDEX" + - "SERVING_LOCATION" + - "CARD_INDEX" + - "MEMBER_COMPANY_SIZE" + - "MEMBER_INDUSTRY" + - "MEMBER_SENIORITY" + - "MEMBER_JOB_TITLE" + - "MEMBER_JOB_FUNCTION" + - "MEMBER_COUNTRY_V2" + - "MEMBER_REGION_V2" + - "MEMBER_COMPANY" + - "PLACEMENT_NAME" + - "IMPRESSION_DEVICE_TYPE" + time_granularity: + title: "Time Granularity" + description: + "Choose how to group the data in your report by time.\ + \ The options are:
    - 'ALL': A single result summarizing the entire\ + \ time range.
    - 'DAILY': Group results by each day.
    - 'MONTHLY':\ + \ Group results by each month.
    - 'YEARLY': Group results by each\ + \ year.
    Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." + type: "string" + enum: + - "ALL" + - "DAILY" + - "MONTHLY" + - "YEARLY" + default: [] + source-us-census: + type: "object" + required: + - "query_path" + - "api_key" + - "sourceType" + properties: + query_params: + type: "string" + description: + "The query parameters portion of the GET request, without the\ + \ api key" + order: 0 + pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$" + examples: + - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" + - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path: + type: "string" + description: "The path portion of the GET request" + order: 1 + pattern: "^data(\\/[\\w\\d]+)+$" + examples: + - "data/2019/cbp" + - "data/2018/acs" + - "data/timeseries/healthins/sahie" + api_key: + type: "string" + description: + "Your API Key. Get your key here." + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "us-census" + const: "us-census" + enum: + - "us-census" + order: 0 + type: "string" + source-us-census-update: + type: "object" + required: + - "query_path" + - "api_key" + properties: + query_params: + type: "string" + description: + "The query parameters portion of the GET request, without the\ + \ api key" + order: 0 + pattern: "^\\w+=[\\w,:*]+(&(?!key)\\w+=[\\w,:*]+)*$" + examples: + - "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" + - "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path: + type: "string" + description: "The path portion of the GET request" + order: 1 + pattern: "^data(\\/[\\w\\d]+)+$" + examples: + - "data/2019/cbp" + - "data/2018/acs" + - "data/timeseries/healthins/sahie" + api_key: + type: "string" + description: + "Your API Key. 
Get your key here." + order: 2 + airbyte_secret: true + source-goldcast: + title: "goldcast.io Source Spec" + type: "object" + required: + - "access_key" + - "sourceType" + properties: + access_key: + type: "string" + description: + "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "goldcast" + const: "goldcast" + enum: + - "goldcast" + order: 0 + type: "string" + source-goldcast-update: + title: "goldcast.io Source Spec" + type: "object" + required: + - "access_key" + properties: + access_key: + type: "string" + description: + "Your API Access Key. See here. The key is case sensitive." + airbyte_secret: true + source-pinterest: + title: "Pinterest Spec" + type: "object" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set a date,\ + \ it would be defaulted to latest allowed date by api (89 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + status: + title: "Status" + description: + "For the ads, ad_groups, and campaigns streams, specifying\ + \ a status will filter out records that do not match the specified ones.\ + \ If a status is not specified, the source will default to records with\ + \ a status of either ACTIVE or PAUSED." 
+ type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true + credentials: + title: "OAuth2.0" + type: "object" + required: + - "auth_method" + - "refresh_token" + - "client_id" + - "client_secret" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to obtain new Access Token, when it's expired." + airbyte_secret: true + x-speakeasy-param-sensitive: true + custom_reports: + title: "Custom Reports" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." 
+ type: "array" + items: + title: "ReportConfig" + description: "Config for custom report" + type: "object" + required: + - "name" + - "level" + - "granularity" + - "columns" + properties: + name: + title: "Name" + description: "The name value of report" + type: "string" + order: 0 + level: + title: "Level" + description: "Chosen level for API" + default: "ADVERTISER" + enum: + - "ADVERTISER" + - "ADVERTISER_TARGETING" + - "CAMPAIGN" + - "CAMPAIGN_TARGETING" + - "AD_GROUP" + - "AD_GROUP_TARGETING" + - "PIN_PROMOTION" + - "PIN_PROMOTION_TARGETING" + - "KEYWORD" + - "PRODUCT_GROUP" + - "PRODUCT_GROUP_TARGETING" + - "PRODUCT_ITEM" + type: "string" + order: 1 + granularity: + title: "Granularity" + description: "Chosen granularity for API" + default: "TOTAL" + enum: + - "TOTAL" + - "DAY" + - "HOUR" + - "WEEK" + - "MONTH" + type: "string" + order: 2 + columns: + title: "Columns" + description: "A list of chosen columns" + default: [] + type: "array" + order: 3 + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "ADVERTISER_ID" + - "AD_ACCOUNT_ID" + - "AD_GROUP_ENTITY_STATUS" + - "AD_GROUP_ID" + - "AD_ID" + - "CAMPAIGN_DAILY_SPEND_CAP" + - "CAMPAIGN_ENTITY_STATUS" + - "CAMPAIGN_ID" + - "CAMPAIGN_LIFETIME_SPEND_CAP" + - "CAMPAIGN_NAME" + - "CHECKOUT_ROAS" + - "CLICKTHROUGH_1" + - "CLICKTHROUGH_1_GROSS" + - "CLICKTHROUGH_2" + - "CPC_IN_MICRO_DOLLAR" + - "CPM_IN_DOLLAR" + - "CPM_IN_MICRO_DOLLAR" + - "CTR" + - "CTR_2" + - "ECPCV_IN_DOLLAR" + - "ECPCV_P95_IN_DOLLAR" + - "ECPC_IN_DOLLAR" + - "ECPC_IN_MICRO_DOLLAR" + - "ECPE_IN_DOLLAR" + - "ECPM_IN_MICRO_DOLLAR" + - "ECPV_IN_DOLLAR" + - "ECTR" + - "EENGAGEMENT_RATE" + - "ENGAGEMENT_1" + - "ENGAGEMENT_2" + - "ENGAGEMENT_RATE" + - "IDEA_PIN_PRODUCT_TAG_VISIT_1" + - "IDEA_PIN_PRODUCT_TAG_VISIT_2" + - "IMPRESSION_1" + - "IMPRESSION_1_GROSS" + - "IMPRESSION_2" + - "INAPP_CHECKOUT_COST_PER_ACTION" + - "OUTBOUND_CLICK_1" + - "OUTBOUND_CLICK_2" + - "PAGE_VISIT_COST_PER_ACTION" + - "PAGE_VISIT_ROAS" + - "PAID_IMPRESSION" + - "PIN_ID" + - "PIN_PROMOTION_ID" + - "REPIN_1" + - "REPIN_2" + - "REPIN_RATE" + - "SPEND_IN_DOLLAR" + - "SPEND_IN_MICRO_DOLLAR" + - "TOTAL_CHECKOUT" + - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICKTHROUGH" + - "TOTAL_CLICK_ADD_TO_CART" + - "TOTAL_CLICK_CHECKOUT" + - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICK_LEAD" + - "TOTAL_CLICK_SIGNUP" + - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CONVERSIONS" + - "TOTAL_CUSTOM" + - "TOTAL_ENGAGEMENT" + - "TOTAL_ENGAGEMENT_CHECKOUT" + - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_ENGAGEMENT_LEAD" + - "TOTAL_ENGAGEMENT_SIGNUP" + - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT" + - "TOTAL_IMPRESSION_FREQUENCY" + - "TOTAL_IMPRESSION_USER" + - "TOTAL_LEAD" + - "TOTAL_OFFLINE_CHECKOUT" + - "TOTAL_PAGE_VISIT" + - "TOTAL_REPIN_RATE" + - "TOTAL_SIGNUP" + - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIDEO_3SEC_VIEWS" + - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND" + - 
"TOTAL_VIDEO_MRC_VIEWS" + - "TOTAL_VIDEO_P0_COMBINED" + - "TOTAL_VIDEO_P100_COMPLETE" + - "TOTAL_VIDEO_P25_COMBINED" + - "TOTAL_VIDEO_P50_COMBINED" + - "TOTAL_VIDEO_P75_COMBINED" + - "TOTAL_VIDEO_P95_COMBINED" + - "TOTAL_VIEW_ADD_TO_CART" + - "TOTAL_VIEW_CHECKOUT" + - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIEW_LEAD" + - "TOTAL_VIEW_SIGNUP" + - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CHECKOUT" + - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CLICK_CHECKOUT" + - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_SESSIONS" + - "TOTAL_WEB_VIEW_CHECKOUT" + - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "VIDEO_3SEC_VIEWS_2" + - "VIDEO_LENGTH" + - "VIDEO_MRC_VIEWS_2" + - "VIDEO_P0_COMBINED_2" + - "VIDEO_P100_COMPLETE_2" + - "VIDEO_P25_COMBINED_2" + - "VIDEO_P50_COMBINED_2" + - "VIDEO_P75_COMBINED_2" + - "VIDEO_P95_COMBINED_2" + - "WEB_CHECKOUT_COST_PER_ACTION" + - "WEB_CHECKOUT_ROAS" + - "WEB_SESSIONS_1" + - "WEB_SESSIONS_2" + click_window_days: + title: "Click window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a pin click action." + default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 4 + engagement_window_days: + title: "Engagement window days" + description: + "Number of days to use as the conversion attribution\ + \ window for an engagement action." + default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 5 + view_window_days: + title: "View window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a view action." 
+ default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 6 + conversion_report_time: + title: "Conversion report time" + description: + "The date by which the conversion metrics returned from\ + \ this endpoint will be reported. There are two dates associated\ + \ with a conversion event: the date that the user interacted with\ + \ the ad, and the date that the user completed a conversion event.." + default: "TIME_OF_AD_ACTION" + enum: + - "TIME_OF_AD_ACTION" + - "TIME_OF_CONVERSION" + type: "string" + order: 7 + attribution_types: + title: "Attribution types" + description: "List of types of attribution for the conversion report" + default: + - "INDIVIDUAL" + - "HOUSEHOLD" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." + enum: + - "INDIVIDUAL" + - "HOUSEHOLD" + order: 8 + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set\ + \ a date, it would be defaulted to latest allowed date by report\ + \ api (913 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + order: 9 + sourceType: + title: "pinterest" + const: "pinterest" + enum: + - "pinterest" + order: 0 + type: "string" + source-pinterest-update: + title: "Pinterest Spec" + type: "object" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set a date,\ + \ it would be defaulted to latest allowed date by api (89 days from today)." 
+ format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + status: + title: "Status" + description: + "For the ads, ad_groups, and campaigns streams, specifying\ + \ a status will filter out records that do not match the specified ones.\ + \ If a status is not specified, the source will default to records with\ + \ a status of either ACTIVE or PAUSED." + type: + - "array" + - "null" + items: + type: "string" + enum: + - "ACTIVE" + - "PAUSED" + - "ARCHIVED" + uniqueItems: true + credentials: + title: "OAuth2.0" + type: "object" + required: + - "auth_method" + - "refresh_token" + - "client_id" + - "client_secret" + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to obtain new Access Token, when it's expired." + airbyte_secret: true + custom_reports: + title: "Custom Reports" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." 
+ type: "array" + items: + title: "ReportConfig" + description: "Config for custom report" + type: "object" + required: + - "name" + - "level" + - "granularity" + - "columns" + properties: + name: + title: "Name" + description: "The name value of report" + type: "string" + order: 0 + level: + title: "Level" + description: "Chosen level for API" + default: "ADVERTISER" + enum: + - "ADVERTISER" + - "ADVERTISER_TARGETING" + - "CAMPAIGN" + - "CAMPAIGN_TARGETING" + - "AD_GROUP" + - "AD_GROUP_TARGETING" + - "PIN_PROMOTION" + - "PIN_PROMOTION_TARGETING" + - "KEYWORD" + - "PRODUCT_GROUP" + - "PRODUCT_GROUP_TARGETING" + - "PRODUCT_ITEM" + type: "string" + order: 1 + granularity: + title: "Granularity" + description: "Chosen granularity for API" + default: "TOTAL" + enum: + - "TOTAL" + - "DAY" + - "HOUR" + - "WEEK" + - "MONTH" + type: "string" + order: 2 + columns: + title: "Columns" + description: "A list of chosen columns" + default: [] + type: "array" + order: 3 + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "ADVERTISER_ID" + - "AD_ACCOUNT_ID" + - "AD_GROUP_ENTITY_STATUS" + - "AD_GROUP_ID" + - "AD_ID" + - "CAMPAIGN_DAILY_SPEND_CAP" + - "CAMPAIGN_ENTITY_STATUS" + - "CAMPAIGN_ID" + - "CAMPAIGN_LIFETIME_SPEND_CAP" + - "CAMPAIGN_NAME" + - "CHECKOUT_ROAS" + - "CLICKTHROUGH_1" + - "CLICKTHROUGH_1_GROSS" + - "CLICKTHROUGH_2" + - "CPC_IN_MICRO_DOLLAR" + - "CPM_IN_DOLLAR" + - "CPM_IN_MICRO_DOLLAR" + - "CTR" + - "CTR_2" + - "ECPCV_IN_DOLLAR" + - "ECPCV_P95_IN_DOLLAR" + - "ECPC_IN_DOLLAR" + - "ECPC_IN_MICRO_DOLLAR" + - "ECPE_IN_DOLLAR" + - "ECPM_IN_MICRO_DOLLAR" + - "ECPV_IN_DOLLAR" + - "ECTR" + - "EENGAGEMENT_RATE" + - "ENGAGEMENT_1" + - "ENGAGEMENT_2" + - "ENGAGEMENT_RATE" + - "IDEA_PIN_PRODUCT_TAG_VISIT_1" + - "IDEA_PIN_PRODUCT_TAG_VISIT_2" + - "IMPRESSION_1" + - "IMPRESSION_1_GROSS" + - "IMPRESSION_2" + - "INAPP_CHECKOUT_COST_PER_ACTION" + - "OUTBOUND_CLICK_1" + - "OUTBOUND_CLICK_2" + - "PAGE_VISIT_COST_PER_ACTION" + - "PAGE_VISIT_ROAS" + - "PAID_IMPRESSION" + - "PIN_ID" + - "PIN_PROMOTION_ID" + - "REPIN_1" + - "REPIN_2" + - "REPIN_RATE" + - "SPEND_IN_DOLLAR" + - "SPEND_IN_MICRO_DOLLAR" + - "TOTAL_CHECKOUT" + - "TOTAL_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICKTHROUGH" + - "TOTAL_CLICK_ADD_TO_CART" + - "TOTAL_CLICK_CHECKOUT" + - "TOTAL_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CLICK_LEAD" + - "TOTAL_CLICK_SIGNUP" + - "TOTAL_CLICK_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_CONVERSIONS" + - "TOTAL_CUSTOM" + - "TOTAL_ENGAGEMENT" + - "TOTAL_ENGAGEMENT_CHECKOUT" + - "TOTAL_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_ENGAGEMENT_LEAD" + - "TOTAL_ENGAGEMENT_SIGNUP" + - "TOTAL_ENGAGEMENT_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_IDEA_PIN_PRODUCT_TAG_VISIT" + - "TOTAL_IMPRESSION_FREQUENCY" + - "TOTAL_IMPRESSION_USER" + - "TOTAL_LEAD" + - "TOTAL_OFFLINE_CHECKOUT" + - "TOTAL_PAGE_VISIT" + - "TOTAL_REPIN_RATE" + - "TOTAL_SIGNUP" + - "TOTAL_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIDEO_3SEC_VIEWS" + - "TOTAL_VIDEO_AVG_WATCHTIME_IN_SECOND" + - 
"TOTAL_VIDEO_MRC_VIEWS" + - "TOTAL_VIDEO_P0_COMBINED" + - "TOTAL_VIDEO_P100_COMPLETE" + - "TOTAL_VIDEO_P25_COMBINED" + - "TOTAL_VIDEO_P50_COMBINED" + - "TOTAL_VIDEO_P75_COMBINED" + - "TOTAL_VIDEO_P95_COMBINED" + - "TOTAL_VIEW_ADD_TO_CART" + - "TOTAL_VIEW_CHECKOUT" + - "TOTAL_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_VIEW_LEAD" + - "TOTAL_VIEW_SIGNUP" + - "TOTAL_VIEW_SIGNUP_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CHECKOUT" + - "TOTAL_WEB_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_CLICK_CHECKOUT" + - "TOTAL_WEB_CLICK_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT" + - "TOTAL_WEB_ENGAGEMENT_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "TOTAL_WEB_SESSIONS" + - "TOTAL_WEB_VIEW_CHECKOUT" + - "TOTAL_WEB_VIEW_CHECKOUT_VALUE_IN_MICRO_DOLLAR" + - "VIDEO_3SEC_VIEWS_2" + - "VIDEO_LENGTH" + - "VIDEO_MRC_VIEWS_2" + - "VIDEO_P0_COMBINED_2" + - "VIDEO_P100_COMPLETE_2" + - "VIDEO_P25_COMBINED_2" + - "VIDEO_P50_COMBINED_2" + - "VIDEO_P75_COMBINED_2" + - "VIDEO_P95_COMBINED_2" + - "WEB_CHECKOUT_COST_PER_ACTION" + - "WEB_CHECKOUT_ROAS" + - "WEB_SESSIONS_1" + - "WEB_SESSIONS_2" + click_window_days: + title: "Click window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a pin click action." + default: 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 4 + engagement_window_days: + title: "Engagement window days" + description: + "Number of days to use as the conversion attribution\ + \ window for an engagement action." + default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 5 + view_window_days: + title: "View window days" + description: + "Number of days to use as the conversion attribution\ + \ window for a view action." 
+ default: + - 30 + enum: + - 0 + - 1 + - 7 + - 14 + - 30 + - 60 + type: "integer" + order: 6 + conversion_report_time: + title: "Conversion report time" + description: + "The date by which the conversion metrics returned from\ + \ this endpoint will be reported. There are two dates associated\ + \ with a conversion event: the date that the user interacted with\ + \ the ad, and the date that the user completed a conversion event.." + default: "TIME_OF_AD_ACTION" + enum: + - "TIME_OF_AD_ACTION" + - "TIME_OF_CONVERSION" + type: "string" + order: 7 + attribution_types: + title: "Attribution types" + description: "List of types of attribution for the conversion report" + default: + - "INDIVIDUAL" + - "HOUSEHOLD" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." + enum: + - "INDIVIDUAL" + - "HOUSEHOLD" + order: 8 + start_date: + type: "string" + title: "Start Date" + description: + "A date in the format YYYY-MM-DD. If you have not set\ + \ a date, it would be defaulted to latest allowed date by report\ + \ api (913 days from today)." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2022-07-28" + order: 9 + source-spacex-api: + type: "object" + required: + - "sourceType" + properties: + id: + type: "string" + title: "Unique ID for specific source target" + desciption: "Optional, For a specific ID" + order: 0 + options: + type: "string" + title: "Configuration options for endpoints" + desciption: + "Optional, Possible values for an endpoint. 
Example values for\ + \ launches-latest, upcoming, past" + order: 1 + sourceType: + title: "spacex-api" + const: "spacex-api" + enum: + - "spacex-api" + order: 0 + type: "string" + source-spacex-api-update: + type: "object" + required: [] + properties: + id: + type: "string" + title: "Unique ID for specific source target" + desciption: "Optional, For a specific ID" + order: 0 + options: + type: "string" + title: "Configuration options for endpoints" + desciption: + "Optional, Possible values for an endpoint. Example values for\ + \ launches-latest, upcoming, past" + order: 1 + source-bamboo-hr: + title: "Bamboo HR Spec" + type: "object" + required: + - "api_key" + - "subdomain" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + description: "Api key of bamboo hr" + airbyte_secret: true + x-speakeasy-param-sensitive: true + subdomain: + type: "string" + order: 1 + title: "subdomain" + description: "Sub Domain of bamboo hr" + custom_reports_fields: + type: "string" + order: 2 + title: "custom_reports_fields" + description: "Comma-separated list of fields to include in custom reports." + custom_reports_include_default_fields: + title: "custom_reports_include_default_fields" + description: + "If true, the custom reports endpoint will include the default\ + \ fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names." 
+ type: "boolean" + default: true + order: 3 + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "bamboo-hr" + const: "bamboo-hr" + enum: + - "bamboo-hr" + order: 0 + type: "string" + source-bamboo-hr-update: + title: "Bamboo HR Spec" + type: "object" + required: + - "api_key" + - "subdomain" + properties: + api_key: + type: "string" + order: 0 + title: "api_key" + description: "Api key of bamboo hr" + airbyte_secret: true + subdomain: + type: "string" + order: 1 + title: "subdomain" + description: "Sub Domain of bamboo hr" + custom_reports_fields: + type: "string" + order: 2 + title: "custom_reports_fields" + description: "Comma-separated list of fields to include in custom reports." + custom_reports_include_default_fields: + title: "custom_reports_include_default_fields" + description: + "If true, the custom reports endpoint will include the default\ + \ fields defined here: https://documentation.bamboohr.com/docs/list-of-field-names." 
+ type: "boolean" + default: true + order: 3 + start_date: + type: "string" + order: 4 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-freshchat: + type: "object" + required: + - "account_name" + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + account_name: + type: "string" + description: "The unique account name for your Freshchat instance" + name: "account_name" + order: 0 + title: "Account Name" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "freshchat" + const: "freshchat" + enum: + - "freshchat" + order: 0 + type: "string" + source-freshchat-update: + type: "object" + required: + - "account_name" + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + account_name: + type: "string" + description: "The unique account name for your Freshchat instance" + name: "account_name" + order: 0 + title: "Account Name" + airbyte_secret: false + source-okta: + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "OAuth 2.0 with private key" + required: + - "auth_type" + - "client_id" + - "key_id" + - "private_key" + - "scope" + properties: + auth_type: + type: "string" + const: "oauth2.0_private_key" + order: 0 + enum: + - "oauth2.0_private_key" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + key_id: + type: "string" + title: "Key ID" + description: "The key ID (kid)." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + private_key: + type: "string" + title: "Private key" + description: "The private key in PEM format" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + scope: + type: "string" + title: "Scope" + description: "The OAuth scope." + order: 4 + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: + "An Okta token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 0 + domain: + type: "string" + title: "Okta domain" + description: + "The Okta domain. See the docs for instructions on how to find it." 
+ airbyte_secret: false + order: 1 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any\ + \ data before this date will not be replicated." + examples: + - "2022-07-22T00:00:00Z" + order: 2 + sourceType: + title: "okta" + const: "okta" + enum: + - "okta" + order: 0 + type: "string" + source-okta-update: + type: "object" + required: [] + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: + "Refresh Token to obtain new Access Token, when it's\ + \ expired." + airbyte_secret: true + - type: "object" + title: "OAuth 2.0 with private key" + required: + - "auth_type" + - "client_id" + - "key_id" + - "private_key" + - "scope" + properties: + auth_type: + type: "string" + const: "oauth2.0_private_key" + order: 0 + enum: + - "oauth2.0_private_key" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + order: 1 + key_id: + type: "string" + title: "Key ID" + description: "The key ID (kid)." 
+ airbyte_secret: true + order: 2 + private_key: + type: "string" + title: "Private key" + description: "The private key in PEM format" + airbyte_secret: true + order: 3 + scope: + type: "string" + title: "Scope" + description: "The OAuth scope." + order: 4 + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: + "An Okta token. See the docs for instructions on how to generate it." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Okta domain" + description: + "The Okta domain. See the docs for instructions on how to find it." + airbyte_secret: false + order: 1 + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format YYYY-MM-DDTHH:MM:SSZ. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2022-07-22T00:00:00Z" + order: 2 + source-hibob: + type: "object" + required: + - "username" + - "is_sandbox" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + is_sandbox: + type: "boolean" + description: "Toggle true if this instance is a HiBob sandbox " + order: 2 + title: "Is Sandbox" + sourceType: + title: "hibob" + const: "hibob" + enum: + - "hibob" + order: 0 + type: "string" + source-hibob-update: + type: "object" + required: + - "username" + - "is_sandbox" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + is_sandbox: + type: "boolean" + description: "Toggle true if this instance is a HiBob sandbox " + order: 2 + title: "Is Sandbox" + source-mixpanel: + title: "Source Mixpanel Spec" + required: + - "credentials" + - "sourceType" + type: "object" + properties: + credentials: + title: "Authentication *" + description: "Choose how to authenticate to Mixpanel" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "Service Account" + required: + - "username" + - "secret" + - "project_id" + properties: + option_title: + type: "string" + const: "Service Account" + order: 0 + enum: + - "Service Account" + username: + order: 1 + title: "Username" + type: "string" + description: + "Mixpanel Service Account Username. See the docs\ + \ for more information on how to obtain this." + secret: + order: 2 + title: "Secret" + type: "string" + description: + "Mixpanel Service Account Secret. See the docs\ + \ for more information on how to obtain this." + airbyte_secret: true + x-speakeasy-param-sensitive: true + project_id: + order: 3 + title: "Project ID" + description: + "Your project ID number. 
See the docs for more information on how to obtain this." + type: "integer" + - type: "object" + title: "Project Secret" + required: + - "api_secret" + properties: + option_title: + type: "string" + const: "Project Secret" + order: 0 + enum: + - "Project Secret" + api_secret: + order: 1 + title: "Project Secret" + type: "string" + description: + "Mixpanel project secret. See the docs for more information on how to obtain this." + airbyte_secret: true + x-speakeasy-param-sensitive: true + attribution_window: + order: 2 + title: "Attribution Window" + type: "integer" + description: + "A period of time for attributing results to ads and the lookback\ + \ period after those actions occur during which ad results are counted.\ + \ Default attribution window is 5 days. (This value should be non-negative\ + \ integer)" + default: 5 + project_timezone: + order: 3 + title: "Project Timezone" + type: "string" + description: + "Time zone in which integer date times are stored. The project\ + \ timezone may be found in the project settings in the Mixpanel console." + default: "US/Pacific" + examples: + - "US/Pacific" + - "UTC" + select_properties_by_default: + order: 4 + title: "Select Properties By Default" + type: "boolean" + description: + "Setting this config parameter to TRUE ensures that new properties\ + \ on events and engage records are captured. Otherwise new properties\ + \ will be ignored." + default: true + start_date: + order: 5 + title: "Start Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If this option is not set, the connector will\ + \ replicate data from up to one year ago by default." + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + end_date: + order: 6 + title: "End Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
Left empty to always sync to most recent date" + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + region: + order: 7 + title: "Region" + description: "The region of mixpanel domain instance either US or EU." + type: "string" + enum: + - "US" + - "EU" + default: "US" + date_window_size: + order: 8 + title: "Date slicing window" + description: + "Defines window size in days, that used to slice through data.\ + \ You can reduce it, if amount of data in each window is too big for your\ + \ environment. (This value should be positive integer)" + type: "integer" + minimum: 1 + default: 30 + page_size: + order: 9 + title: "Page Size" + description: + "The number of records to fetch per request for the engage\ + \ stream. Default is 1000. If you are experiencing long sync times with\ + \ this stream, try increasing this value." + type: "integer" + minimum: 1 + default: 1000 + sourceType: + title: "mixpanel" + const: "mixpanel" + enum: + - "mixpanel" + order: 0 + type: "string" + source-mixpanel-update: + title: "Source Mixpanel Spec" + required: + - "credentials" + type: "object" + properties: + credentials: + title: "Authentication *" + description: "Choose how to authenticate to Mixpanel" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "Service Account" + required: + - "username" + - "secret" + - "project_id" + properties: + option_title: + type: "string" + const: "Service Account" + order: 0 + enum: + - "Service Account" + username: + order: 1 + title: "Username" + type: "string" + description: + "Mixpanel Service Account Username. See the docs\ + \ for more information on how to obtain this." + secret: + order: 2 + title: "Secret" + type: "string" + description: + "Mixpanel Service Account Secret. See the docs\ + \ for more information on how to obtain this." + airbyte_secret: true + project_id: + order: 3 + title: "Project ID" + description: + "Your project ID number. 
See the docs for more information on how to obtain this." + type: "integer" + - type: "object" + title: "Project Secret" + required: + - "api_secret" + properties: + option_title: + type: "string" + const: "Project Secret" + order: 0 + enum: + - "Project Secret" + api_secret: + order: 1 + title: "Project Secret" + type: "string" + description: + "Mixpanel project secret. See the docs for more information on how to obtain this." + airbyte_secret: true + attribution_window: + order: 2 + title: "Attribution Window" + type: "integer" + description: + "A period of time for attributing results to ads and the lookback\ + \ period after those actions occur during which ad results are counted.\ + \ Default attribution window is 5 days. (This value should be non-negative\ + \ integer)" + default: 5 + project_timezone: + order: 3 + title: "Project Timezone" + type: "string" + description: + "Time zone in which integer date times are stored. The project\ + \ timezone may be found in the project settings in the Mixpanel console." + default: "US/Pacific" + examples: + - "US/Pacific" + - "UTC" + select_properties_by_default: + order: 4 + title: "Select Properties By Default" + type: "boolean" + description: + "Setting this config parameter to TRUE ensures that new properties\ + \ on events and engage records are captured. Otherwise new properties\ + \ will be ignored." + default: true + start_date: + order: 5 + title: "Start Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If this option is not set, the connector will\ + \ replicate data from up to one year ago by default." + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + end_date: + order: 6 + title: "End Date" + type: "string" + description: + "The date in the format YYYY-MM-DD. Any data after this date\ + \ will not be replicated. 
Left empty to always sync to most recent date" + examples: + - "2021-11-16" + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?$" + format: "date-time" + region: + order: 7 + title: "Region" + description: "The region of mixpanel domain instance either US or EU." + type: "string" + enum: + - "US" + - "EU" + default: "US" + date_window_size: + order: 8 + title: "Date slicing window" + description: + "Defines window size in days, that used to slice through data.\ + \ You can reduce it, if amount of data in each window is too big for your\ + \ environment. (This value should be positive integer)" + type: "integer" + minimum: 1 + default: 30 + page_size: + order: 9 + title: "Page Size" + description: + "The number of records to fetch per request for the engage\ + \ stream. Default is 1000. If you are experiencing long sync times with\ + \ this stream, try increasing this value." + type: "integer" + minimum: 1 + default: 1000 + source-ip2whois: + type: "object" + required: + - "sourceType" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your API Key. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + domain: + type: "string" + title: "Domain" + description: + "Domain name. See here." + examples: + - "www.google.com" + - "www.facebook.com" + order: 1 + sourceType: + title: "ip2whois" + const: "ip2whois" + enum: + - "ip2whois" + order: 0 + type: "string" + source-ip2whois-update: + type: "object" + required: [] + properties: + api_key: + type: "string" + title: "API key" + description: + "Your API Key. See here." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Domain" + description: + "Domain name. See here." + examples: + - "www.google.com" + - "www.facebook.com" + order: 1 + source-twitter: + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + description: + "App only Bearer Token. 
See the docs for more information on how to obtain this token." + title: "Access Token" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + query: + type: "string" + description: + "Query for matching Tweets. You can learn how to build this\ + \ query by reading build a query guide ." + title: "Search Query" + order: 1 + start_date: + type: "string" + description: + "The start date for retrieving tweets cannot be more than 7\ + \ days in the past." + title: "Start Date" + format: "date-time" + order: 2 + end_date: + type: "string" + description: + "The end date for retrieving tweets must be a minimum of 10\ + \ seconds prior to the request time." + title: "End Date" + format: "date-time" + order: 3 + sourceType: + title: "twitter" + const: "twitter" + enum: + - "twitter" + order: 0 + type: "string" + source-twitter-update: + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + description: + "App only Bearer Token. See the docs for more information on how to obtain this token." + title: "Access Token" + airbyte_secret: true + order: 0 + query: + type: "string" + description: + "Query for matching Tweets. You can learn how to build this\ + \ query by reading build a query guide ." + title: "Search Query" + order: 1 + start_date: + type: "string" + description: + "The start date for retrieving tweets cannot be more than 7\ + \ days in the past." + title: "Start Date" + format: "date-time" + order: 2 + end_date: + type: "string" + description: + "The end date for retrieving tweets must be a minimum of 10\ + \ seconds prior to the request time." + title: "End Date" + format: "date-time" + order: 3 + source-sftp-bulk: + title: "SFTP Bulk Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the 
`hi_res`\ + \ mode. This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + host: + title: "Host Address" + description: "The server host address" + examples: + - "www.host.com" + - "192.0.2.1" + order: 2 + type: "string" + username: + title: "User Name" + description: "The server user" + order: 3 + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the SFTP Server" + type: "object" + order: 4 + oneOf: + - title: "Authenticate via Password" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "password" + const: "password" + enum: + - "password" + type: "string" + password: + title: "Password" + description: "Password" + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + - "auth_type" + - title: "Authenticate via Private Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "private_key" + const: "private_key" + enum: + - "private_key" + type: "string" + private_key: + title: "Private key" + description: "The Private key" 
+ multiline: true + order: 4 + type: "string" + required: + - "private_key" + - "auth_type" + port: + title: "Host Address" + description: "The server port" + default: 22 + examples: + - "22" + order: 5 + type: "integer" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + default: "/" + examples: + - "/logs/2022" + order: 6 + pattern_descriptor: "/folder_to_sync" + type: "string" + sourceType: + title: "sftp-bulk" + const: "sftp-bulk" + enum: + - "sftp-bulk" + order: 0 + type: "string" + required: + - "streams" + - "host" + - "username" + - "credentials" + - "sourceType" + source-sftp-bulk-update: + title: "SFTP Bulk Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. 
For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." 
+ default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the `hi_res`\ + \ mode. 
This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + host: + title: "Host Address" + description: "The server host address" + examples: + - "www.host.com" + - "192.0.2.1" + order: 2 + type: "string" + username: + title: "User Name" + description: "The server user" + order: 3 + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the SFTP Server" + type: "object" + order: 4 + oneOf: + - title: "Authenticate via Password" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "password" + const: "password" + enum: + - "password" + type: "string" + password: + title: "Password" + description: "Password" + airbyte_secret: true + order: 3 + type: "string" + required: + - "password" + - "auth_type" + - title: "Authenticate via Private Key" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "private_key" + const: "private_key" + enum: + - "private_key" + type: "string" + private_key: + title: "Private key" + description: "The Private key" + multiline: true + order: 4 + type: "string" + 
required: + - "private_key" + - "auth_type" + port: + title: "Host Address" + description: "The server port" + default: 22 + examples: + - "22" + order: 5 + type: "integer" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + default: "/" + examples: + - "/logs/2022" + order: 6 + pattern_descriptor: "/folder_to_sync" + type: "string" + required: + - "streams" + - "host" + - "username" + - "credentials" + source-zendesk-support: + title: "Source Zendesk Support Spec" + type: "object" + required: + - "subdomain" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The UTC date and time from which you'd like to replicate data,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2020-10-15T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + format: "date-time" + order: 2 + subdomain: + type: "string" + title: "Subdomain" + description: + "This is your unique Zendesk subdomain that can be found in\ + \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ + \ MY_SUBDOMAIN is the value of your subdomain." + order: 0 + credentials: + title: "Authentication" + type: "object" + description: + "Zendesk allows two authentication methods. We recommend using\ + \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ + \ Source users." + order: 1 + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The OAuth access token. See the Zendesk docs for more information on generating this token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See our full documentation for more information on generating this\ + \ token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zendesk-support" + const: "zendesk-support" + enum: + - "zendesk-support" + order: 0 + type: "string" + source-zendesk-support-update: + title: "Source Zendesk Support Spec" + type: "object" + required: + - "subdomain" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The UTC date and time from which you'd like to replicate data,\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated." + examples: + - "2020-10-15T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + format: "date-time" + order: 2 + subdomain: + type: "string" + title: "Subdomain" + description: + "This is your unique Zendesk subdomain that can be found in\ + \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ + \ MY_SUBDOMAIN is the value of your subdomain." 
+ order: 0 + credentials: + title: "Authentication" + type: "object" + description: + "Zendesk allows two authentication methods. We recommend using\ + \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ + \ Source users." + order: 1 + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "access_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + access_token: + type: "string" + title: "Access Token" + description: + "The OAuth access token. See the Zendesk docs for more information on generating this token." + airbyte_secret: true + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + - title: "API Token" + type: "object" + required: + - "email" + - "api_token" + additionalProperties: true + properties: + credentials: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + email: + title: "Email" + type: "string" + description: "The user email for your Zendesk account." + api_token: + title: "API Token" + type: "string" + description: + "The value of the API token generated. See our full documentation for more information on generating this\ + \ token." + airbyte_secret: true + source-microsoft-onedrive: + title: "Microsoft OneDrive Source Spec" + description: + "SourceMicrosoftOneDriveSpec class for Microsoft OneDrive Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the OneDrive API." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + drive_name: + title: "Drive Name" + description: "Name of the Microsoft OneDrive drive where the file(s) exist." + default: "OneDrive" + order: 2 + type: "string" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS'\ + \ for shared items the user has access to, and 'ALL' to search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." + order: 4 + type: "string" + sourceType: + title: "microsoft-onedrive" + const: "microsoft-onedrive" + enum: + - "microsoft-onedrive" + order: 0 + type: "string" + required: + - "streams" + - "credentials" + - "sourceType" + source-microsoft-onedrive-update: + title: "Microsoft OneDrive Source Spec" + description: + "SourceMicrosoftOneDriveSpec class for Microsoft OneDrive Source\ + \ Specification.\nThis class combines the authentication details with additional\ + \ configuration for the OneDrive API." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + credentials: + title: "Authentication" + description: "Credentials for connecting to the One Drive API" + type: "object" + order: 0 + oneOf: + - title: "Authenticate via Microsoft (OAuth)" + description: + "OAuthCredentials class to hold authentication details for\ + \ Microsoft OAuth authentication.\nThis class uses pydantic for data\ + \ validation and settings management." 
+ type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "client_id" + - "client_secret" + - "refresh_token" + - title: "Service Key Authentication" + description: + "ServiceCredentials class for service key authentication.\n\ + This class is structured similarly to OAuthCredentials but for a different\ + \ authentication method." + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + tenant_id: + title: "Tenant ID" + description: "Tenant ID of the Microsoft OneDrive user" + airbyte_secret: true + type: "string" + user_principal_name: + title: "User Principal Name" + description: + "Special characters such as a period, comma, space, and\ + \ the at sign (@) are converted to underscores (_). 
More details:\ + \ https://learn.microsoft.com/en-us/sharepoint/list-onedrive-urls" + airbyte_secret: true + type: "string" + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + required: + - "tenant_id" + - "user_principal_name" + - "client_id" + - "client_secret" + drive_name: + title: "Drive Name" + description: "Name of the Microsoft OneDrive drive where the file(s) exist." + default: "OneDrive" + order: 2 + type: "string" + search_scope: + title: "Search Scope" + description: + "Specifies the location(s) to search for files. Valid options\ + \ are 'ACCESSIBLE_DRIVES' to search in the selected OneDrive drive, 'SHARED_ITEMS'\ + \ for shared items the user has access to, and 'ALL' to search both." + default: "ALL" + enum: + - "ACCESSIBLE_DRIVES" + - "SHARED_ITEMS" + - "ALL" + order: 3 + type: "string" + folder_path: + title: "Folder Path" + description: + "Path to a specific folder within the drives to search for\ + \ files. Leave empty to search all folders of the drives. This does not\ + \ apply to shared items." + default: "." 
+ order: 4 + type: "string" + required: + - "streams" + - "credentials" + source-appfigures: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + search_store: + type: "string" + description: "The store which needs to be searched in streams" + title: "Search Store" + default: "apple" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + group_by: + type: "string" + description: "Category term for grouping the search results" + title: "Group by" + default: "product" + enum: + - "network" + - "product" + - "country" + - "date" + order: 3 + sourceType: + title: "appfigures" + const: "appfigures" + enum: + - "appfigures" + order: 0 + type: "string" + source-appfigures-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + search_store: + type: "string" + description: "The store which needs to be searched in streams" + title: "Search Store" + default: "apple" + order: 1 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + group_by: + type: "string" + description: "Category term for grouping the search results" + title: "Group by" + default: "product" + enum: + - "network" + - "product" + - "country" + - "date" + order: 3 + source-tiktok-marketing: + title: "TikTok Marketing Source Spec" + type: "object" + properties: + credentials: + title: "Authentication Method" + description: "Authentication method" + default: {} + order: 0 + type: "object" + oneOf: + - title: "OAuth2.0" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: 
+ - "oauth2.0" + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + description: "Long-term Authorized Access Token." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID to filter reports and streams. Let\ + \ this empty to retrieve all." + type: "string" + required: + - "app_id" + - "secret" + - "access_token" + - title: "Sandbox Access Token" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "sandbox_access_token" + order: 0 + type: "string" + enum: + - "sandbox_access_token" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID which generated for the developer's\ + \ Sandbox application." + type: "string" + access_token: + title: "Access Token" + description: "The long-term authorized access token." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "advertiser_id" + - "access_token" + start_date: + title: "Replication Start Date" + description: + "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + format: "date" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DD. All data generated between\ + \ start_date and this date will be replicated. Not setting this option\ + \ will result in always syncing the data till the current date." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + type: "string" + format: "date" + attribution_window: + title: "Attribution Window" + description: "The attribution window in days." + minimum: 0 + maximum: 364 + default: 3 + order: 3 + type: "integer" + include_deleted: + title: + "Include Deleted Data in Reports and Ads, Ad Groups and Campaign\ + \ streams." + description: + "Set to active if you want to include deleted data in report\ + \ based streams and Ads, Ad Groups and Campaign streams." + default: false + order: 4 + type: "boolean" + sourceType: + title: "tiktok-marketing" + const: "tiktok-marketing" + enum: + - "tiktok-marketing" + order: 0 + type: "string" + source-tiktok-marketing-update: + title: "TikTok Marketing Source Spec" + type: "object" + properties: + credentials: + title: "Authentication Method" + description: "Authentication method" + default: {} + order: 0 + type: "object" + oneOf: + - title: "OAuth2.0" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + access_token: + title: "Access Token" + description: "Long-term Authorized Access Token." + airbyte_secret: true + type: "string" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID to filter reports and streams. Let\ + \ this empty to retrieve all." 
+ type: "string" + required: + - "app_id" + - "secret" + - "access_token" + - title: "Sandbox Access Token" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "sandbox_access_token" + order: 0 + type: "string" + enum: + - "sandbox_access_token" + advertiser_id: + title: "Advertiser ID" + description: + "The Advertiser ID which generated for the developer's\ + \ Sandbox application." + type: "string" + access_token: + title: "Access Token" + description: "The long-term authorized access token." + airbyte_secret: true + type: "string" + required: + - "advertiser_id" + - "access_token" + start_date: + title: "Replication Start Date" + description: + "The Start Date in format: YYYY-MM-DD. Any data before this\ + \ date will not be replicated. If this parameter is not set, all data\ + \ will be replicated." + default: "2016-09-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 1 + type: "string" + format: "date" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DD. All data generated between\ + \ start_date and this date will be replicated. Not setting this option\ + \ will result in always syncing the data till the current date." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + type: "string" + format: "date" + attribution_window: + title: "Attribution Window" + description: "The attribution window in days." + minimum: 0 + maximum: 364 + default: 3 + order: 3 + type: "integer" + include_deleted: + title: + "Include Deleted Data in Reports and Ads, Ad Groups and Campaign\ + \ streams." + description: + "Set to active if you want to include deleted data in report\ + \ based streams and Ads, Ad Groups and Campaign streams." 
+ default: false + order: 4 + type: "boolean" + source-aws-cloudtrail: + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + - "sourceType" + properties: + aws_key_id: + type: "string" + title: "Key ID" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_secret_key: + type: "string" + title: "Secret Key" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_region_name: + type: "string" + title: "Region Name" + description: + "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + default: "us-east-1" + start_date: + type: "string" + title: "Start Date" + description: + "The date you would like to replicate data. Data in AWS CloudTrail\ + \ is available for last 90 days only. Format: YYYY-MM-DD." 
+ examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + lookup_attributes_filter: + title: + "Filter applied while fetching records based on AttributeKey and\ + \ AttributeValue which will be appended on the request body" + type: "object" + required: + - "attribute_key" + - "attribute_value" + properties: + attribute_key: + type: "string" + title: "Attribute Key from the response to filter" + examples: + - "EventName" + default: "EventName" + attribute_value: + type: "string" + title: "Corresponding value to the given attribute key" + examples: + - "ListInstanceAssociations" + - "ConsoleLogin" + default: "ListInstanceAssociations" + sourceType: + title: "aws-cloudtrail" + const: "aws-cloudtrail" + enum: + - "aws-cloudtrail" + order: 0 + type: "string" + source-aws-cloudtrail-update: + title: "Aws CloudTrail Spec" + type: "object" + required: + - "aws_key_id" + - "aws_secret_key" + - "aws_region_name" + properties: + aws_key_id: + type: "string" + title: "Key ID" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + aws_secret_key: + type: "string" + title: "Secret Key" + description: + "AWS CloudTrail Access Key ID. See the docs for more information on how to obtain this key." + airbyte_secret: true + aws_region_name: + type: "string" + title: "Region Name" + description: + "The default AWS Region to use, for example, us-west-1 or us-west-2.\ + \ When specifying a Region inline during client initialization, this property\ + \ is named region_name." + default: "us-east-1" + start_date: + type: "string" + title: "Start Date" + description: + "The date you would like to replicate data. Data in AWS CloudTrail\ + \ is available for last 90 days only. Format: YYYY-MM-DD." 
+ examples: + - "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + lookup_attributes_filter: + title: + "Filter applied while fetching records based on AttributeKey and\ + \ AttributeValue which will be appended on the request body" + type: "object" + required: + - "attribute_key" + - "attribute_value" + properties: + attribute_key: + type: "string" + title: "Attribute Key from the response to filter" + examples: + - "EventName" + default: "EventName" + attribute_value: + type: "string" + title: "Corresponding value to the given attribute key" + examples: + - "ListInstanceAssociations" + - "ConsoleLogin" + default: "ListInstanceAssociations" + source-jira: + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + - "sourceType" + properties: + api_token: + type: "string" + title: "API Token" + description: + "Jira API Token. See the docs for more information on how to generate this key. API Token\ + \ is used for Authorization to your account by BasicAuth." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + domain: + type: "string" + title: "Domain" + examples: + - ".atlassian.net" + - ".jira.com" + - "jira..com" + description: + "The Domain for your Jira account, e.g. airbyteio.atlassian.net,\ + \ airbyteio.jira.com, jira.your-domain.com" + order: 1 + email: + type: "string" + title: "Email" + description: + "The user email for your Jira account which you used to generate\ + \ the API token. This field is used for Authorization to your account\ + \ by BasicAuth." + order: 2 + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: + "List of Jira project keys to replicate data for, or leave\ + \ it empty if you want to replicate data for all projects." 
+ order: 3 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you want to replicate data from Jira,\ + \ use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies\ + \ to certain streams, and only data generated on or after the start date\ + \ will be replicated. Or leave it empty if you want to replicate all data.\ + \ For more information, refer to the documentation." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 4 + lookback_window_minutes: + title: "Lookback window" + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N minutes. By default, updated objects that\ + \ are not newly created are not incrementally synced." + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 5 + enable_experimental_streams: + type: "boolean" + title: "Enable Experimental Streams" + description: + "Allow the use of experimental streams which rely on undocumented\ + \ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\ + \ for more info." + default: false + order: 6 + sourceType: + title: "jira" + const: "jira" + enum: + - "jira" + order: 0 + type: "string" + source-jira-update: + title: "Jira Spec" + type: "object" + required: + - "api_token" + - "domain" + - "email" + properties: + api_token: + type: "string" + title: "API Token" + description: + "Jira API Token. See the docs for more information on how to generate this key. API Token\ + \ is used for Authorization to your account by BasicAuth." + airbyte_secret: true + order: 0 + domain: + type: "string" + title: "Domain" + examples: + - ".atlassian.net" + - ".jira.com" + - "jira..com" + description: + "The Domain for your Jira account, e.g. 
airbyteio.atlassian.net,\ + \ airbyteio.jira.com, jira.your-domain.com" + order: 1 + email: + type: "string" + title: "Email" + description: + "The user email for your Jira account which you used to generate\ + \ the API token. This field is used for Authorization to your account\ + \ by BasicAuth." + order: 2 + projects: + type: "array" + title: "Projects" + items: + type: "string" + examples: + - "PROJ1" + - "PROJ2" + description: + "List of Jira project keys to replicate data for, or leave\ + \ it empty if you want to replicate data for all projects." + order: 3 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you want to replicate data from Jira,\ + \ use the format YYYY-MM-DDT00:00:00Z. Note that this field only applies\ + \ to certain streams, and only data generated on or after the start date\ + \ will be replicated. Or leave it empty if you want to replicate all data.\ + \ For more information, refer to the documentation." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 4 + lookback_window_minutes: + title: "Lookback window" + description: + "When set to N, the connector will always refresh resources\ + \ created within the past N minutes. By default, updated objects that\ + \ are not newly created are not incrementally synced." + examples: + - 60 + default: 0 + minimum: 0 + maximum: 576000 + type: "integer" + order: 5 + enable_experimental_streams: + type: "boolean" + title: "Enable Experimental Streams" + description: + "Allow the use of experimental streams which rely on undocumented\ + \ Jira API endpoints. See https://docs.airbyte.com/integrations/sources/jira#experimental-tables\ + \ for more info." 
+ default: false + order: 6 + source-hubspot: + title: "HubSpot Source Spec" + type: "object" + required: + - "credentials" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If not set, \"2006-06-01T00:00:00Z\"\ + \ (Hubspot creation date) will be used as start date. It's recommended\ + \ to provide relevant to your data start date value to optimize synchronization." + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + credentials: + title: "Authentication" + description: "Choose how to authenticate to HubSpot." + type: "object" + oneOf: + - type: "object" + title: "OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Refresh token to renew an expired access token. See\ + \ the Hubspot docs if you need help finding this token." 
+ type: "string" + examples: + - "refresh_token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Private App" + required: + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials set" + const: "Private App Credentials" + order: 0 + enum: + - "Private App Credentials" + access_token: + title: "Access token" + description: + "HubSpot Access token. See the Hubspot docs if you need help finding this token." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + enable_experimental_streams: + title: "Enable experimental streams" + description: + "If enabled then experimental streams become available for\ + \ sync." + type: "boolean" + default: false + sourceType: + title: "hubspot" + const: "hubspot" + enum: + - "hubspot" + order: 0 + type: "string" + source-hubspot-update: + title: "HubSpot Source Spec" + type: "object" + required: + - "credentials" + properties: + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. If not set, \"2006-06-01T00:00:00Z\"\ + \ (Hubspot creation date) will be used as start date. It's recommended\ + \ to provide relevant to your data start date value to optimize synchronization." + examples: + - "2017-01-25T00:00:00Z" + format: "date-time" + credentials: + title: "Authentication" + description: "Choose how to authenticate to HubSpot." 
+ type: "object" + oneOf: + - type: "object" + title: "OAuth" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this ID." + type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + description: + "Refresh token to renew an expired access token. See\ + \ the Hubspot docs if you need help finding this token." + type: "string" + examples: + - "refresh_token" + airbyte_secret: true + - type: "object" + title: "Private App" + required: + - "access_token" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Auth Type" + description: "Name of the credentials set" + const: "Private App Credentials" + order: 0 + enum: + - "Private App Credentials" + access_token: + title: "Access token" + description: + "HubSpot Access token. See the Hubspot docs if you need help finding this token." + type: "string" + airbyte_secret: true + enable_experimental_streams: + title: "Enable experimental streams" + description: + "If enabled then experimental streams become available for\ + \ sync." 
+ type: "boolean" + default: false + source-rss: + title: "RSS Spec" + type: "object" + required: + - "url" + - "sourceType" + properties: + url: + type: "string" + description: "RSS Feed URL" + sourceType: + title: "rss" + const: "rss" + enum: + - "rss" + order: 0 + type: "string" + source-rss-update: + title: "RSS Spec" + type: "object" + required: + - "url" + properties: + url: + type: "string" + description: "RSS Feed URL" + source-sap-fieldglass: + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "sap-fieldglass" + const: "sap-fieldglass" + enum: + - "sap-fieldglass" + order: 0 + type: "string" + source-sap-fieldglass-update: + title: "Sap Fieldglass Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: "API Key" + airbyte_secret: true + source-twilio-taskrouter: + type: "object" + required: + - "account_sid" + - "auth_token" + - "sourceType" + properties: + account_sid: + type: "string" + description: "Twilio Account ID" + title: "Account SID" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + auth_token: + type: "string" + description: "Twilio Auth Token" + airbyte_secret: true + title: "Auth Token" + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "twilio-taskrouter" + const: "twilio-taskrouter" + enum: + - "twilio-taskrouter" + order: 0 + type: "string" + source-twilio-taskrouter-update: + type: "object" + required: + - "account_sid" + - "auth_token" + properties: + account_sid: + type: "string" + description: "Twilio Account ID" + title: "Account SID" + airbyte_secret: true + order: 0 + auth_token: + type: "string" + description: "Twilio Auth Token" + airbyte_secret: true + title: "Auth Token" + order: 1 + source-xkcd: + type: "object" + properties: + comic_number: + type: 
"string" + title: "comic_number" + description: + "Specifies the comic number in which details are to be extracted,\ + \ pagination will begin with that number to end of available comics" + default: "2960" + order: 0 + sourceType: + title: "xkcd" + const: "xkcd" + enum: + - "xkcd" + order: 0 + type: "string" + source-xkcd-update: + type: "object" + properties: + comic_number: + type: "string" + title: "comic_number" + description: + "Specifies the comic number in which details are to be extracted,\ + \ pagination will begin with that number to end of available comics" + default: "2960" + order: 0 + source-zenloop: + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + description: + "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + x-speakeasy-param-sensitive: true + date_from: + type: "string" + description: + "Zenloop date_from. Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: + "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + x-speakeasy-param-sensitive: true + survey_group_id: + type: "string" + description: + "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "zenloop" + const: "zenloop" + enum: + - "zenloop" + order: 0 + type: "string" + source-zenloop-update: + title: "Zenloop Spec" + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + description: + "Zenloop API Token. You can get the API token in settings page\ + \ here " + airbyte_secret: true + date_from: + type: "string" + description: + "Zenloop date_from. 
Format: 2021-10-24T03:30:30Z or 2021-10-24.\ + \ Leave empty if only data from current data should be synced" + examples: + - "2021-10-24T03:30:30Z" + survey_id: + type: "string" + description: + "Zenloop Survey ID. Can be found here. Leave empty to pull answers from all surveys" + airbyte_secret: true + survey_group_id: + type: "string" + description: + "Zenloop Survey Group ID. Can be found by pulling All Survey\ + \ Groups via SurveyGroups stream. Leave empty to pull answers from all\ + \ survey groups" + airbyte_secret: true + source-tempo: + type: "object" + required: + - "api_token" + - "sourceType" + properties: + api_token: + type: "string" + title: "API token" + description: + "Tempo API Token. Go to Tempo>Settings, scroll down to Data\ + \ Access and select API integration." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "tempo" + const: "tempo" + enum: + - "tempo" + order: 0 + type: "string" + source-tempo-update: + type: "object" + required: + - "api_token" + properties: + api_token: + type: "string" + title: "API token" + description: + "Tempo API Token. Go to Tempo>Settings, scroll down to Data\ + \ Access and select API integration." + airbyte_secret: true + order: 0 + source-chargebee: + title: "Chargebee Spec" + type: "object" + required: + - "site" + - "site_api_key" + - "start_date" + - "sourceType" + properties: + site_api_key: + type: "string" + title: "API Key" + description: + "Chargebee API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + site: + type: "string" + title: "Site" + description: "The site prefix for your Chargebee instance." + examples: + - "airbyte-test" + order: 1 + start_date: + type: "string" + format: "date-time" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000Z.\ + \ Any data before this date will not be replicated." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-25T00:00:00Z" + order: 2 + product_catalog: + type: "string" + title: "Product Catalog" + description: + "Product Catalog version of your Chargebee site. Instructions\ + \ on how to find your version you may find here under `API Version` section. If left blank, the product catalog\ + \ version will be set to 2.0." + enum: + - "1.0" + - "2.0" + default: "2.0" + order: 3 + sourceType: + title: "chargebee" + const: "chargebee" + enum: + - "chargebee" + order: 0 + type: "string" + source-chargebee-update: + title: "Chargebee Spec" + type: "object" + required: + - "site" + - "site_api_key" + - "start_date" + properties: + site_api_key: + type: "string" + title: "API Key" + description: + "Chargebee API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + site: + type: "string" + title: "Site" + description: "The site prefix for your Chargebee instance." + examples: + - "airbyte-test" + order: 1 + start_date: + type: "string" + format: "date-time" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000Z.\ + \ Any data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-25T00:00:00Z" + order: 2 + product_catalog: + type: "string" + title: "Product Catalog" + description: + "Product Catalog version of your Chargebee site. Instructions\ + \ on how to find your version you may find here under `API Version` section. If left blank, the product catalog\ + \ version will be set to 2.0." 
+ enum: + - "1.0" + - "2.0" + default: "2.0" + order: 3 + source-onesignal: + title: "OneSignal Source Spec" + type: "object" + required: + - "user_auth_key" + - "start_date" + - "outcome_names" + - "applications" + - "sourceType" + properties: + user_auth_key: + type: "string" + title: "User Auth Key" + description: + "OneSignal User Auth Key, see the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + applications: + type: "array" + title: "Applications" + description: + "Applications keys, see the docs for more information on how to obtain this data" + items: + type: "object" + properties: + app_name: + type: "string" + title: "OneSignal App Name" + order: 0 + app_id: + type: "string" + title: "OneSignal App ID" + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + app_api_key: + type: "string" + title: "REST API Key" + order: 2 + airbyte_secret: true + x-speakeasy-param-sensitive: true + required: + - "app_id" + - "app_api_key" + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for OneSignal\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 2 + outcome_names: + type: "string" + title: "Outcome Names" + description: + "Comma-separated list of names and the value (sum/count) for\ + \ the returned outcome data. 
See the docs for more details" + examples: + - "os__session_duration.count,os__click.count,CustomOutcomeName.sum" + order: 3 + sourceType: + title: "onesignal" + const: "onesignal" + enum: + - "onesignal" + order: 0 + type: "string" + source-onesignal-update: + title: "OneSignal Source Spec" + type: "object" + required: + - "user_auth_key" + - "start_date" + - "outcome_names" + - "applications" + properties: + user_auth_key: + type: "string" + title: "User Auth Key" + description: + "OneSignal User Auth Key, see the docs for more information on how to obtain this key." + airbyte_secret: true + order: 0 + applications: + type: "array" + title: "Applications" + description: + "Applications keys, see the docs for more information on how to obtain this data" + items: + type: "object" + properties: + app_name: + type: "string" + title: "OneSignal App Name" + order: 0 + app_id: + type: "string" + title: "OneSignal App ID" + order: 1 + airbyte_secret: true + app_api_key: + type: "string" + title: "REST API Key" + order: 2 + airbyte_secret: true + required: + - "app_id" + - "app_api_key" + order: 1 + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for OneSignal\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + order: 2 + outcome_names: + type: "string" + title: "Outcome Names" + description: + "Comma-separated list of names and the value (sum/count) for\ + \ the returned outcome data. 
See the docs for more details" + examples: + - "os__session_duration.count,os__click.count,CustomOutcomeName.sum" + order: 3 + source-google-analytics-data-api: + title: "Google Analytics (Data API) Spec" + type: "object" + required: + - "property_ids" + - "sourceType" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Google Analytics developer\ + \ application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining a new access token." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + credentials_json: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key linked to the service account used for\ + \ authorization. For steps on obtaining this key, refer to the setup guide." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + property_ids: + title: "Property IDs" + description: + "A list of your Property IDs. The Property ID is a unique number\ + \ assigned to each property in Google Analytics, found in your GA4 property\ + \ URL. This ID allows the connector to track the specific events associated\ + \ with your property. Refer to the Google\ + \ Analytics documentation to locate your property ID." + order: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + examples: + - - "1738294" + - "5729978930" + uniqueItems: true + date_ranges_start_date: + type: "string" + title: "Start Date" + description: + "The start date from which to replicate report data in the\ + \ format YYYY-MM-DD. Data generated before this date will not be included\ + \ in the report. Not applied to custom Cohort reports." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 4 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name." + type: "string" + order: 0 + dimensions: + title: "Dimensions" + description: "A list of dimensions." + type: "array" + items: + type: "string" + minItems: 1 + order: 1 + metrics: + title: "Metrics" + description: "A list of metrics." + type: "array" + items: + type: "string" + minItems: 1 + order: 2 + dimensionFilter: + title: "Dimensions filter" + description: "Dimensions filter" + type: "object" + order: 3 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all dimensions." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + metricFilter: + title: "Metrics filter" + description: "Metrics filter" + type: "object" + order: 4 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all metrics." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + cohortSpec: + title: "Cohort Reports" + description: + "Cohort reports creates a time series of user retention\ + \ for the cohort." + type: "object" + order: 5 + oneOf: + - title: "Disabled" + type: "object" + properties: + enabled: + type: "string" + const: "false" + enum: + - "false" + - title: "Enabled" + type: "object" + properties: + enabled: + type: "string" + const: "true" + enum: + - "true" + cohorts: + name: "Cohorts" + order: 0 + type: "array" + always_show: true + items: + title: "Cohorts" + type: "object" + required: + - "dimension" + - "dateRange" + properties: + name: + title: "Name" + type: "string" + always_show: true + pattern: "^(?!(cohort_|RESERVED_)).*$" + description: + "Assigns a name to this cohort. 
If not set,\ + \ cohorts are named by their zero based index cohort_0,\ + \ cohort_1, etc." + order: 0 + dimension: + title: "Dimension" + description: + "Dimension used by the cohort. Required and\ + \ only supports `firstSessionDate`" + type: "string" + enum: + - "firstSessionDate" + order: 1 + dateRange: + type: "object" + required: + - "startDate" + - "endDate" + properties: + startDate: + title: "Start Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + endDate: + title: "End Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 3 + cohortsRange: + type: "object" + order: 1 + required: + - "granularity" + - "endOffset" + properties: + granularity: + title: "Granularity" + description: + "The granularity used to interpret the startOffset\ + \ and endOffset for the extended reporting date range\ + \ for a cohort report." + type: "string" + enum: + - "GRANULARITY_UNSPECIFIED" + - "DAILY" + - "WEEKLY" + - "MONTHLY" + order: 0 + startOffset: + title: "Start Offset" + description: + "Specifies the start date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 1 + endOffset: + title: "End Offset" + description: + "Specifies the end date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 2 + cohortReportSettings: + type: "object" + title: "Cohort Report Settings" + description: "Optional settings for a cohort report." 
+ properties: + accumulate: + always_show: true + title: "Accumulate" + description: + "If true, accumulates the result from first\ + \ touch day to the end day" + type: "boolean" + required: + - "name" + - "dimensions" + - "metrics" + window_in_days: + type: "integer" + title: "Data Request Interval (Days)" + description: + "The interval in days for each data request made to the Google\ + \ Analytics API. A larger value speeds up data sync, but increases the\ + \ chance of data sampling, which may result in inaccuracies. We recommend\ + \ a value of 1 to minimize sampling, unless speed is an absolute priority\ + \ over accuracy. Acceptable values range from 1 to 364. Does not apply\ + \ to custom Cohort reports. More information is available in the documentation." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + minimum: 1 + maximum: 364 + default: 1 + order: 5 + lookback_window: + type: "integer" + title: "Lookback window (Days)" + description: + "Since attribution changes after the event date, and Google\ + \ Analytics has a data processing latency, we should specify how many\ + \ days in the past we should refresh the data in every run. So if you\ + \ set it at 5 days, in every sync it will fetch the last bookmark date\ + \ minus 5 days." + examples: + - 2 + - 3 + - 4 + - 7 + - 14 + - 28 + minimum: 2 + maximum: 60 + default: 2 + order: 6 + keep_empty_rows: + type: "boolean" + title: "Keep Empty Rows" + description: + "If false, each row with all metrics equal to 0 will not be\ + \ returned. If true, these rows will be returned if they are not separately\ + \ removed by a filter. More information is available in the documentation." + default: false + order: 7 + convert_conversions_event: + type: "boolean" + title: "Convert `conversions:*` Metrics to Float" + description: + "Enables conversion of `conversions:*` event metrics from integers\ + \ to floats. 
This is beneficial for preventing data rounding when the\ + \ API returns float values for any `conversions:*` fields." + default: false + order: 8 + sourceType: + title: "google-analytics-data-api" + const: "google-analytics-data-api" + enum: + - "google-analytics-data-api" + order: 0 + type: "string" + source-google-analytics-data-api-update: + title: "Google Analytics (Data API) Spec" + type: "object" + required: + - "property_ids" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Google (Oauth)" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Google Analytics developer\ + \ application." + airbyte_secret: true + order: 2 + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining a new access token." + airbyte_secret: true + order: 3 + access_token: + title: "Access Token" + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + order: 4 + - type: "object" + title: "Service Account Key Authentication" + required: + - "credentials_json" + properties: + auth_type: + type: "string" + const: "Service" + order: 0 + enum: + - "Service" + credentials_json: + title: "Service Account JSON Key" + type: "string" + description: + "The JSON key linked to the service account used for\ + \ authorization. For steps on obtaining this key, refer to the setup guide." + examples: + - "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID,\ + \ \"private_key_id\": YOUR_PRIVATE_KEY, ... 
}" + airbyte_secret: true + order: 1 + property_ids: + title: "Property IDs" + description: + "A list of your Property IDs. The Property ID is a unique number\ + \ assigned to each property in Google Analytics, found in your GA4 property\ + \ URL. This ID allows the connector to track the specific events associated\ + \ with your property. Refer to the Google\ + \ Analytics documentation to locate your property ID." + order: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]*$" + examples: + - - "1738294" + - "5729978930" + uniqueItems: true + date_ranges_start_date: + type: "string" + title: "Start Date" + description: + "The start date from which to replicate report data in the\ + \ format YYYY-MM-DD. Data generated before this date will not be included\ + \ in the report. Not applied to custom Cohort reports." + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 4 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name." + type: "string" + order: 0 + dimensions: + title: "Dimensions" + description: "A list of dimensions." + type: "array" + items: + type: "string" + minItems: 1 + order: 1 + metrics: + title: "Metrics" + description: "A list of metrics." + type: "array" + items: + type: "string" + minItems: 1 + order: 2 + dimensionFilter: + title: "Dimensions filter" + description: "Dimensions filter" + type: "object" + order: 3 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all dimensions." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + metricFilter: + title: "Metrics filter" + description: "Metrics filter" + type: "object" + order: 4 + oneOf: + - title: "andGroup" + description: "The FilterExpressions in andGroup have an AND relationship." 
+ type: "object" + properties: + filter_type: + type: "string" + const: "andGroup" + order: 0 + enum: + - "andGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: 
"doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "orGroup" + type: "object" + description: "The FilterExpressions in orGroup have an OR relationship." 
+ properties: + filter_type: + type: "string" + const: "orGroup" + order: 0 + enum: + - "orGroup" + expressions: + title: "Expressions" + type: "array" + order: 1 + items: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - 
"doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + required: + - "filter_type" + - "expressions" + - title: "notExpression" + type: "object" + description: "The FilterExpression is NOT of notExpression." 
+ properties: + filter_type: + type: "string" + const: "notExpression" + order: 0 + enum: + - "notExpression" + expression: + title: "Expression" + type: "object" + properties: + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + 
required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + - "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + - title: "filter" + type: "object" + description: + "A primitive filter. In the same FilterExpression,\ + \ all of the filter's field names need to be either all metrics." 
+ properties: + filter_type: + type: "string" + const: "filter" + order: 0 + enum: + - "filter" + field_name: + title: "fieldName" + type: "string" + order: 1 + filter: + title: "filter" + type: "object" + order: 2 + oneOf: + - title: "stringFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "stringFilter" + enum: + - "stringFilter" + matchType: + title: "matchType" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "MATCH_TYPE_UNSPECIFIED" + - "EXACT" + - "BEGINS_WITH" + - "ENDS_WITH" + - "CONTAINS" + - "FULL_REGEXP" + - "PARTIAL_REGEXP" + value: + tittle: "value" + type: "string" + order: 0 + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 2 + required: + - "filter_name" + - "value" + - title: "inListFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "inListFilter" + enum: + - "inListFilter" + values: + tittle: "values" + type: "array" + minItems: 1 + order: 0 + items: + type: "string" + caseSensitive: + tittle: "caseSensitive" + type: "boolean" + order: 1 + required: + - "filter_name" + - "values" + - title: "numericFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "numericFilter" + enum: + - "numericFilter" + operation: + title: "operation" + type: "array" + order: 1 + items: + title: "ValidEnums" + enum: + - "OPERATION_UNSPECIFIED" + - "EQUAL" + - "LESS_THAN" + - "LESS_THAN_OR_EQUAL" + - "GREATER_THAN" + - "GREATER_THAN_OR_EQUAL" + value: + tittle: "value" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "operation" + 
- "value" + - title: "betweenFilter" + type: "object" + properties: + filter_name: + type: "string" + const: "betweenFilter" + enum: + - "betweenFilter" + fromValue: + tittle: "fromValue" + type: "object" + order: 0 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + toValue: + tittle: "toValue" + type: "object" + order: 1 + oneOf: + - title: "int64Value" + type: "object" + properties: + value_type: + type: "string" + const: "int64Value" + enum: + - "int64Value" + value: + type: "string" + required: + - "value_type" + - "value" + - title: "doubleValue" + type: "object" + properties: + value_type: + type: "string" + const: "doubleValue" + enum: + - "doubleValue" + value: + type: "number" + required: + - "value_type" + - "value" + required: + - "filter_name" + - "fromValue" + - "toValue" + required: + - "field_name" + - "filter" + cohortSpec: + title: "Cohort Reports" + description: + "Cohort reports creates a time series of user retention\ + \ for the cohort." + type: "object" + order: 5 + oneOf: + - title: "Disabled" + type: "object" + properties: + enabled: + type: "string" + const: "false" + enum: + - "false" + - title: "Enabled" + type: "object" + properties: + enabled: + type: "string" + const: "true" + enum: + - "true" + cohorts: + name: "Cohorts" + order: 0 + type: "array" + always_show: true + items: + title: "Cohorts" + type: "object" + required: + - "dimension" + - "dateRange" + properties: + name: + title: "Name" + type: "string" + always_show: true + pattern: "^(?!(cohort_|RESERVED_)).*$" + description: + "Assigns a name to this cohort. 
If not set,\ + \ cohorts are named by their zero based index cohort_0,\ + \ cohort_1, etc." + order: 0 + dimension: + title: "Dimension" + description: + "Dimension used by the cohort. Required and\ + \ only supports `firstSessionDate`" + type: "string" + enum: + - "firstSessionDate" + order: 1 + dateRange: + type: "object" + required: + - "startDate" + - "endDate" + properties: + startDate: + title: "Start Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 2 + endDate: + title: "End Date" + type: "string" + format: "date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + examples: + - "2021-01-01" + order: 3 + cohortsRange: + type: "object" + order: 1 + required: + - "granularity" + - "endOffset" + properties: + granularity: + title: "Granularity" + description: + "The granularity used to interpret the startOffset\ + \ and endOffset for the extended reporting date range\ + \ for a cohort report." + type: "string" + enum: + - "GRANULARITY_UNSPECIFIED" + - "DAILY" + - "WEEKLY" + - "MONTHLY" + order: 0 + startOffset: + title: "Start Offset" + description: + "Specifies the start date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 1 + endOffset: + title: "End Offset" + description: + "Specifies the end date of the extended reporting\ + \ date range for a cohort report." + type: "integer" + minimum: 0 + order: 2 + cohortReportSettings: + type: "object" + title: "Cohort Report Settings" + description: "Optional settings for a cohort report." 
+ properties: + accumulate: + always_show: true + title: "Accumulate" + description: + "If true, accumulates the result from first\ + \ touch day to the end day" + type: "boolean" + required: + - "name" + - "dimensions" + - "metrics" + window_in_days: + type: "integer" + title: "Data Request Interval (Days)" + description: + "The interval in days for each data request made to the Google\ + \ Analytics API. A larger value speeds up data sync, but increases the\ + \ chance of data sampling, which may result in inaccuracies. We recommend\ + \ a value of 1 to minimize sampling, unless speed is an absolute priority\ + \ over accuracy. Acceptable values range from 1 to 364. Does not apply\ + \ to custom Cohort reports. More information is available in the documentation." + examples: + - 30 + - 60 + - 90 + - 120 + - 200 + - 364 + minimum: 1 + maximum: 364 + default: 1 + order: 5 + lookback_window: + type: "integer" + title: "Lookback window (Days)" + description: + "Since attribution changes after the event date, and Google\ + \ Analytics has a data processing latency, we should specify how many\ + \ days in the past we should refresh the data in every run. So if you\ + \ set it at 5 days, in every sync it will fetch the last bookmark date\ + \ minus 5 days." + examples: + - 2 + - 3 + - 4 + - 7 + - 14 + - 28 + minimum: 2 + maximum: 60 + default: 2 + order: 6 + keep_empty_rows: + type: "boolean" + title: "Keep Empty Rows" + description: + "If false, each row with all metrics equal to 0 will not be\ + \ returned. If true, these rows will be returned if they are not separately\ + \ removed by a filter. More information is available in the documentation." + default: false + order: 7 + convert_conversions_event: + type: "boolean" + title: "Convert `conversions:*` Metrics to Float" + description: + "Enables conversion of `conversions:*` event metrics from integers\ + \ to floats. 
This is beneficial for preventing data rounding when the\ + \ API returns float values for any `conversions:*` fields." + default: false + order: 8 + source-mailgun: + type: "object" + required: + - "private_key" + - "sourceType" + properties: + private_key: + type: "string" + order: 0 + title: "Private API Key" + description: "Primary account API key to access your Mailgun data." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain_region: + type: "string" + order: 1 + title: "Domain Region Code" + description: + "Domain region code. 'EU' or 'US' are possible values. The\ + \ default is 'US'." + default: "US" + enum: + - "US" + - "EU" + start_date: + type: "string" + order: 2 + title: "Replication Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2023-08-01T00:00:00Z" + description: + "UTC date and time in the format 2020-10-01 00:00:00. Any data\ + \ before this date will not be replicated. If omitted, defaults to 3 days\ + \ ago." + sourceType: + title: "mailgun" + const: "mailgun" + enum: + - "mailgun" + order: 0 + type: "string" + source-mailgun-update: + type: "object" + required: + - "private_key" + properties: + private_key: + type: "string" + order: 0 + title: "Private API Key" + description: "Primary account API key to access your Mailgun data." + airbyte_secret: true + domain_region: + type: "string" + order: 1 + title: "Domain Region Code" + description: + "Domain region code. 'EU' or 'US' are possible values. The\ + \ default is 'US'." + default: "US" + enum: + - "US" + - "EU" + start_date: + type: "string" + order: 2 + title: "Replication Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2023-08-01T00:00:00Z" + description: + "UTC date and time in the format 2020-10-01 00:00:00. Any data\ + \ before this date will not be replicated. If omitted, defaults to 3 days\ + \ ago." 
+ source-intercom: + title: "Source Intercom Spec" + type: "object" + required: + - "start_date" + - "access_token" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + title: "Access token" + type: "string" + description: + "Access token for making authenticated requests. See the Intercom docs for more information." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + activity_logs_time_step: + type: "integer" + default: 30 + minimum: 1 + maximum: 91 + title: "Activity logs stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of Activity\ + \ Logs stream." + examples: + - 30 + - 10 + - 5 + order: 3 + sourceType: + title: "intercom" + const: "intercom" + enum: + - "intercom" + order: 0 + type: "string" + source-intercom-update: + title: "Source Intercom Spec" + type: "object" + required: + - "start_date" + - "access_token" + properties: + start_date: + type: "string" + title: "Start date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + title: "Access token" + type: "string" + description: + "Access token for making authenticated requests. See the Intercom docs for more information." + airbyte_secret: true + order: 0 + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + activity_logs_time_step: + type: "integer" + default: 30 + minimum: 1 + maximum: 91 + title: "Activity logs stream slice step size (in days)" + description: + "Set lower value in case of failing long running sync of Activity\ + \ Logs stream." + examples: + - 30 + - 10 + - 5 + order: 3 + source-rki-covid: + title: "RKI Covid Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format 2017-01-25. Any data before this date\ + \ will not be replicated." + order: 1 + sourceType: + title: "rki-covid" + const: "rki-covid" + enum: + - "rki-covid" + order: 0 + type: "string" + source-rki-covid-update: + title: "RKI Covid Spec" + type: "object" + required: + - "start_date" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "UTC date in the format 2017-01-25. Any data before this date\ + \ will not be replicated." + order: 1 + source-secoda: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "Api Key" + airbyte_secret: true + description: + "Your API Access Key. See here. The key is case sensitive." 
+ order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "secoda" + const: "secoda" + enum: + - "secoda" + order: 0 + type: "string" + source-secoda-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "Api Key" + airbyte_secret: true + description: + "Your API Access Key. See here. The key is case sensitive." + order: 0 + source-zoom: + title: "Zoom Spec" + type: "object" + required: + - "account_id" + - "client_id" + - "client_secret" + - "authorization_endpoint" + - "sourceType" + properties: + account_id: + type: "string" + order: 0 + description: + "The account ID for your Zoom account. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + client_id: + type: "string" + order: 1 + description: + "The client ID for your Zoom app. You can find this in the\ + \ Zoom Marketplace under the \"Manage\" tab for your app." + client_secret: + type: "string" + order: 2 + description: + "The client secret for your Zoom app. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + airbyte_secret: true + x-speakeasy-param-sensitive: true + authorization_endpoint: + type: "string" + order: 3 + default: "https://zoom.us/oauth/token" + sourceType: + title: "zoom" + const: "zoom" + enum: + - "zoom" + order: 0 + type: "string" + source-zoom-update: + title: "Zoom Spec" + type: "object" + required: + - "account_id" + - "client_id" + - "client_secret" + - "authorization_endpoint" + properties: + account_id: + type: "string" + order: 0 + description: + "The account ID for your Zoom account. You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + client_id: + type: "string" + order: 1 + description: + "The client ID for your Zoom app. You can find this in the\ + \ Zoom Marketplace under the \"Manage\" tab for your app." + client_secret: + type: "string" + order: 2 + description: + "The client secret for your Zoom app. 
You can find this in\ + \ the Zoom Marketplace under the \"Manage\" tab for your app." + airbyte_secret: true + authorization_endpoint: + type: "string" + order: 3 + default: "https://zoom.us/oauth/token" + source-delighted: + title: "Delighted Spec" + type: "object" + required: + - "since" + - "api_key" + - "sourceType" + properties: + api_key: + title: "Delighted API Key" + type: "string" + description: "A Delighted API key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + since: + title: "Replication Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + examples: + - "2022-05-30T04:50:23Z" + - "2022-05-30 04:50:23" + pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" + order: 1 + format: "date-time" + sourceType: + title: "delighted" + const: "delighted" + enum: + - "delighted" + order: 0 + type: "string" + source-delighted-update: + title: "Delighted Spec" + type: "object" + required: + - "since" + - "api_key" + properties: + api_key: + title: "Delighted API Key" + type: "string" + description: "A Delighted API key." 
+ airbyte_secret: true + order: 0 + since: + title: "Replication Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + examples: + - "2022-05-30T04:50:23Z" + - "2022-05-30 04:50:23" + pattern: "^\\d{4}-\\d{2}-\\d{2}[T ]\\d{2}:\\d{2}:\\d{2}(\\.\\d+)?Z?$" + order: 1 + format: "date-time" + source-klarna: + title: "Klarna Spec" + type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + - "sourceType" + properties: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "na" + - "oc" + description: + "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'na', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: + "Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: + "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: + "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "klarna" + const: "klarna" + enum: + - "klarna" + order: 0 + type: "string" + source-klarna-update: + title: "Klarna Spec" + type: "object" + required: + - "region" + - "playground" + - "username" + - "password" + properties: + region: + title: "Region" + type: "string" + enum: + - "eu" + - "na" + - "oc" + description: + "Base url region (For playground eu https://docs.klarna.com/klarna-payments/api/payments-api/#tag/API-URLs).\ + \ Supported 'eu', 'na', 'oc'" + playground: + title: "Playground" + type: "boolean" + description: + 
"Propertie defining if connector is used against playground\ + \ or production environment" + default: false + username: + title: "Username" + type: "string" + description: + "Consists of your Merchant ID (eid) - a unique number that\ + \ identifies your e-store, combined with a random string (https://developers.klarna.com/api/#authentication)" + password: + title: "Password" + type: "string" + description: + "A string which is associated with your Merchant ID and is\ + \ used to authorize use of Klarna's APIs (https://developers.klarna.com/api/#authentication)" + airbyte_secret: true + source-typeform: + type: "object" + required: + - "credentials" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Typeform account and then generate a personal\ + \ Access Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Typeform\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + format: "date-time" + form_ids: + title: "Form IDs to replicate" + description: + "When this parameter is set, the connector will replicate data\ + \ only from the input forms. Otherwise, all forms in your Typeform account\ + \ will be replicated. You can find form IDs in your form URLs. For example,\ + \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\ + \ u6nXL7. You can find form URLs on Share panel" + type: "array" + items: + type: "string" + uniqueItems: true + order: 3 + sourceType: + title: "typeform" + const: "typeform" + enum: + - "typeform" + order: 0 + type: "string" + source-typeform-update: + type: "object" + required: + - "credentials" + properties: + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + properties: + auth_type: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." 
+ airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + description: "The key to refresh the expired access_token." + airbyte_secret: true + - title: "Private Token" + type: "object" + required: + - "access_token" + properties: + auth_type: + type: "string" + const: "access_token" + enum: + - "access_token" + access_token: + type: "string" + title: "Private Token" + description: + "Log into your Typeform account and then generate a personal\ + \ Access Token." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Typeform\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + format: "date-time" + form_ids: + title: "Form IDs to replicate" + description: + "When this parameter is set, the connector will replicate data\ + \ only from the input forms. Otherwise, all forms in your Typeform account\ + \ will be replicated. You can find form IDs in your form URLs. For example,\ + \ in the URL \"https://mysite.typeform.com/to/u6nXL7\" the form_id is\ + \ u6nXL7. 
You can find form URLs on Share panel" + type: "array" + items: + type: "string" + uniqueItems: true + order: 3 + source-dremio: + title: "Dremio Spec" + type: "object" + required: + - "api_key" + - "base_url" + - "sourceType" + properties: + api_key: + type: "string" + description: + "API Key that is generated when you authenticate to Dremio\ + \ API" + airbyte_secret: true + x-speakeasy-param-sensitive: true + base_url: + type: "string" + description: "URL of your Dremio instance" + default: "https://app.dremio.cloud" + sourceType: + title: "dremio" + const: "dremio" + enum: + - "dremio" + order: 0 + type: "string" + source-dremio-update: + title: "Dremio Spec" + type: "object" + required: + - "api_key" + - "base_url" + properties: + api_key: + type: "string" + description: + "API Key that is generated when you authenticate to Dremio\ + \ API" + airbyte_secret: true + base_url: + type: "string" + description: "URL of your Dremio instance" + default: "https://app.dremio.cloud" + source-cimis: + type: "object" + required: + - "api_key" + - "targets_type" + - "targets" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + targets_type: + type: "string" + title: "Targets Type" + enum: + - "WSN station numbers" + - "California zip codes" + - "decimal-degree coordinates" + - "street addresses" + order: 1 + targets: + type: "array" + title: "Targets" + order: 2 + daily_data_items: + type: "array" + title: "Daily Data Items" + enum: + - "day-air-tmp-avg" + - "day-air-tmp-min" + - "day-dew-pnt" + - "day-eto" + - "day-asce-eto" + - "day-asce-etr" + - "day-precip" + - "day-rel-hum-avg" + - "day-rel-hum-max" + - "day-rel-hum-min" + - "day-soil-tmp-avg" + - "day-soil-tmp-max" + - "day-soil-tmp-min" + - "day-sol-rad-avg" + - "day-sol-rad-net" + - "day-vap-pres-max" + - "day-vap-pres-avg" + - "day-wind-ene" + - "day-wind-ese" + - 
"day-wind-nne" + - "day-wind-nnw" + - "day-wind-run" + - "day-wind-spd-avg" + - "day-wind-ssw" + - "day-wind-wnw" + - "day-wind-wsw" + order: 3 + hourly_data_items: + type: "array" + title: "Hourly Data Items" + enum: + - "hly-air-tmp" + - "hly-dew-pnt" + - "hly-eto" + - "hly-net-rad" + - "hly-asce-eto" + - "hly-asce-etr" + - "hly-precip" + - "hly-rel-hum" + - "hly-res-wind" + - "hly-soil-tmp" + - "hly-sol-rad" + - "hly-vap-pres" + - "hly-wind-dir" + - "hly-wind-spd" + order: 4 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + unit_of_measure: + type: "string" + title: "Unit of Measure" + enum: + - "E" + - "M" + order: 7 + sourceType: + title: "cimis" + const: "cimis" + enum: + - "cimis" + order: 0 + type: "string" + source-cimis-update: + type: "object" + required: + - "api_key" + - "targets_type" + - "targets" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + name: "api_key" + title: "API Key" + airbyte_secret: true + order: 0 + targets_type: + type: "string" + title: "Targets Type" + enum: + - "WSN station numbers" + - "California zip codes" + - "decimal-degree coordinates" + - "street addresses" + order: 1 + targets: + type: "array" + title: "Targets" + order: 2 + daily_data_items: + type: "array" + title: "Daily Data Items" + enum: + - "day-air-tmp-avg" + - "day-air-tmp-min" + - "day-dew-pnt" + - "day-eto" + - "day-asce-eto" + - "day-asce-etr" + - "day-precip" + - "day-rel-hum-avg" + - "day-rel-hum-max" + - "day-rel-hum-min" + - "day-soil-tmp-avg" + - "day-soil-tmp-max" + - "day-soil-tmp-min" + - "day-sol-rad-avg" + - "day-sol-rad-net" + - "day-vap-pres-max" + - "day-vap-pres-avg" + - "day-wind-ene" + - "day-wind-ese" + - "day-wind-nne" + - "day-wind-nnw" + - "day-wind-run" + - 
"day-wind-spd-avg" + - "day-wind-ssw" + - "day-wind-wnw" + - "day-wind-wsw" + order: 3 + hourly_data_items: + type: "array" + title: "Hourly Data Items" + enum: + - "hly-air-tmp" + - "hly-dew-pnt" + - "hly-eto" + - "hly-net-rad" + - "hly-asce-eto" + - "hly-asce-etr" + - "hly-precip" + - "hly-rel-hum" + - "hly-res-wind" + - "hly-soil-tmp" + - "hly-sol-rad" + - "hly-vap-pres" + - "hly-wind-dir" + - "hly-wind-spd" + order: 4 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 5 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 6 + unit_of_measure: + type: "string" + title: "Unit of Measure" + enum: + - "E" + - "M" + order: 7 + source-paypal-transaction: + type: "object" + required: + - "client_id" + - "client_secret" + - "start_date" + - "is_sandbox" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Paypal developer application." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "The Client Secret of your Paypal developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\ + \ present time." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 2 + is_sandbox: + title: "Sandbox" + description: "Determines whether to use the sandbox or production environment." 
+ type: "boolean" + default: false + dispute_start_date: + title: "Dispute Start Date Range" + description: + "Start Date parameter for the list dispute endpoint in ISO format.\ + \ This Start Date must be in range within 180 days before present time,\ + \ and requires ONLY 3 miliseconds(mandatory). If you don't use this option,\ + \ it defaults to a start date set 180 days in the past." + type: "string" + examples: + - "2021-06-11T23:59:59.000Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{3}Z$" + format: "date-time" + order: 3 + end_date: + title: "End Date" + description: + "End Date for data extraction in ISO format. This can be help you select specific range of time,\ + \ mainly for test purposes or data integrity tests. When this is not\ + \ used, now_utc() is used by the streams. This does not apply to Disputes\ + \ and Product streams." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 4 + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + time_window: + type: "integer" + title: "Number of days per request" + description: + "The number of days per request. Must be a number between 1\ + \ and 31." + default: 7 + minimum: 1 + maximum: 31 + sourceType: + title: "paypal-transaction" + const: "paypal-transaction" + enum: + - "paypal-transaction" + order: 0 + type: "string" + source-paypal-transaction-update: + type: "object" + required: + - "client_id" + - "client_secret" + - "start_date" + - "is_sandbox" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Paypal developer application." 
+ airbyte_secret: true + order: 0 + client_secret: + type: "string" + title: "Client secret" + description: "The Client Secret of your Paypal developer application." + airbyte_secret: true + order: 1 + start_date: + title: "Start Date" + description: + "Start Date for data extraction in ISO format. Date must be in range from 3 years till 12 hrs before\ + \ present time." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 2 + is_sandbox: + title: "Sandbox" + description: "Determines whether to use the sandbox or production environment." + type: "boolean" + default: false + dispute_start_date: + title: "Dispute Start Date Range" + description: + "Start Date parameter for the list dispute endpoint in ISO format.\ + \ This Start Date must be in range within 180 days before present time,\ + \ and requires ONLY 3 miliseconds(mandatory). If you don't use this option,\ + \ it defaults to a start date set 180 days in the past." + type: "string" + examples: + - "2021-06-11T23:59:59.000Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\\.[0-9]{3}Z$" + format: "date-time" + order: 3 + end_date: + title: "End Date" + description: + "End Date for data extraction in ISO format. This can be help you select specific range of time,\ + \ mainly for test purposes or data integrity tests. When this is not\ + \ used, now_utc() is used by the streams. This does not apply to Disputes\ + \ and Product streams." + type: "string" + examples: + - "2021-06-11T23:59:59Z" + - "2021-06-11T23:59:59+00:00" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}(|Z|[+-][0-9]{2}:[0-9]{2})$" + format: "date-time" + order: 4 + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." 
+ airbyte_secret: true + time_window: + type: "integer" + title: "Number of days per request" + description: + "The number of days per request. Must be a number between 1\ + \ and 31." + default: 7 + minimum: 1 + maximum: 31 + source-lemlist: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + title": "API key" + description: "Lemlist API key," + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "lemlist" + const: "lemlist" + enum: + - "lemlist" + order: 0 + type: "string" + source-lemlist-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + title": "API key" + description: "Lemlist API key," + order: 0 + source-pexels-api: + type: "object" + required: + - "api_key" + - "query" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key from the pexels website" + airbyte_secret: true + description: + "API key is required to access pexels api, For getting your's\ + \ goto https://www.pexels.com/api/documentation and create account for\ + \ free." + order: 0 + x-speakeasy-param-sensitive: true + color: + type: "string" + title: "Specific color for the search" + description: + "Optional, Desired photo color. Supported colors red, orange,\ + \ yellow, green, turquoise, blue, violet, pink, brown, black, gray, white\ + \ or any hexidecimal color code." + examples: + - "red" + - "orange" + order: 1 + locale: + type: "string" + title: "Specific locale for the search" + description: + "Optional, The locale of the search you are performing. The\ + \ current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE'\ + \ 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR'\ + \ 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR'\ + \ 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'." 
+ examples: + - "en-US" + - "pt-BR" + order: 2 + orientation: + type: "string" + title: "Specific orientation for the search" + description: + "Optional, Desired photo orientation. The current supported\ + \ orientations are landscape, portrait or square" + examples: + - "square" + - "landscape" + order: 3 + query: + type: "string" + title: "Specific query for the search" + description: + "Optional, the search query, Example Ocean, Tigers, Pears,\ + \ etc." + examples: + - "people" + - "oceans" + order: 4 + size: + type: "string" + title: "Specific size for the search" + description: + "Optional, Minimum photo size. The current supported sizes\ + \ are large(24MP), medium(12MP) or small(4MP)." + examples: + - "large" + - "small" + order: 5 + sourceType: + title: "pexels-api" + const: "pexels-api" + enum: + - "pexels-api" + order: 0 + type: "string" + source-pexels-api-update: + type: "object" + required: + - "api_key" + - "query" + properties: + api_key: + type: "string" + title: "API Key from the pexels website" + airbyte_secret: true + description: + "API key is required to access pexels api, For getting your's\ + \ goto https://www.pexels.com/api/documentation and create account for\ + \ free." + order: 0 + color: + type: "string" + title: "Specific color for the search" + description: + "Optional, Desired photo color. Supported colors red, orange,\ + \ yellow, green, turquoise, blue, violet, pink, brown, black, gray, white\ + \ or any hexidecimal color code." + examples: + - "red" + - "orange" + order: 1 + locale: + type: "string" + title: "Specific locale for the search" + description: + "Optional, The locale of the search you are performing. The\ + \ current supported locales are 'en-US' 'pt-BR' 'es-ES' 'ca-ES' 'de-DE'\ + \ 'it-IT' 'fr-FR' 'sv-SE' 'id-ID' 'pl-PL' 'ja-JP' 'zh-TW' 'zh-CN' 'ko-KR'\ + \ 'th-TH' 'nl-NL' 'hu-HU' 'vi-VN' 'cs-CZ' 'da-DK' 'fi-FI' 'uk-UA' 'el-GR'\ + \ 'ro-RO' 'nb-NO' 'sk-SK' 'tr-TR' 'ru-RU'." 
+ examples: + - "en-US" + - "pt-BR" + order: 2 + orientation: + type: "string" + title: "Specific orientation for the search" + description: + "Optional, Desired photo orientation. The current supported\ + \ orientations are landscape, portrait or square" + examples: + - "square" + - "landscape" + order: 3 + query: + type: "string" + title: "Specific query for the search" + description: + "Optional, the search query, Example Ocean, Tigers, Pears,\ + \ etc." + examples: + - "people" + - "oceans" + order: 4 + size: + type: "string" + title: "Specific size for the search" + description: + "Optional, Minimum photo size. The current supported sizes\ + \ are large(24MP), medium(12MP) or small(4MP)." + examples: + - "large" + - "small" + order: 5 + source-leadfeeder: + type: "object" + required: + - "api_token" + - "start_date" + - "sourceType" + properties: + api_token: + type: "string" + order: 0 + title: "Api Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "leadfeeder" + const: "leadfeeder" + enum: + - "leadfeeder" + order: 0 + type: "string" + source-leadfeeder-update: + type: "object" + required: + - "api_token" + - "start_date" + properties: + api_token: + type: "string" + order: 0 + title: "Api Token" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-glassfrog: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API key provided by Glassfrog" + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "glassfrog" + const: "glassfrog" + enum: + - "glassfrog" + order: 0 + type: "string" + source-glassfrog-update: + type: 
"object" + required: + - "api_key" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: "API key provided by Glassfrog" + order: 0 + source-appcues: + type: "object" + required: + - "username" + - "account_id" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + account_id: + type: "string" + description: "Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)" + order: 2 + title: "Account ID" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "appcues" + const: "appcues" + enum: + - "appcues" + order: 0 + type: "string" + source-appcues-update: + type: "object" + required: + - "username" + - "account_id" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + account_id: + type: "string" + description: "Account ID of Appcues found in account settings page (https://studio.appcues.com/settings/account)" + order: 2 + title: "Account ID" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-facebook-marketing: + title: "Source Facebook Marketing" + type: "object" + properties: + account_ids: + title: "Ad Account ID(s)" + description: + "The Facebook Ad account ID(s) to pull data from. The Ad account\ + \ ID number is in the account dropdown menu or in your browser's address\ + \ bar of your Meta Ads Manager. See the docs for more information." + order: 0 + pattern_descriptor: "The Ad Account ID must be a number." 
+ examples: + - "111111111111111" + minItems: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]+$" + uniqueItems: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’s Dashboard,\ + \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\ + \ ads_read, read_insights, business_management. Then click on \"Get\ + \ token\". See the docs for more information." + order: 1 + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + credentials: + title: "Authentication" + description: "Credentials for connecting to the Facebook Marketing API" + type: "object" + oneOf: + - title: "Authenticate via Facebook Marketing (Oauth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Facebook Marketing API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret for the Facebook Marketing API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. 
From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "access_token" + - "auth_type" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for all incremental\ + \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\ + \ will be replicated for usual streams and only last 2 years for insight\ + \ streams." + order: 2 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not setting\ + \ this option will result in always syncing the latest data." + order: 3 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + campaign_statuses: + title: "Campaign Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 4 + type: "array" + items: + title: "ValidCampaignStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + adset_statuses: + title: "AdSet Statuses" + description: + "Select the statuses you want to be loaded in the stream. 
If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 5 + type: "array" + items: + title: "ValidAdSetStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + ad_statuses: + title: "Ad Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 6 + type: "array" + items: + title: "ValidAdStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ADSET_PAUSED" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "DISAPPROVED" + - "IN_PROCESS" + - "PAUSED" + - "PENDING_BILLING_INFO" + - "PENDING_REVIEW" + - "PREAPPROVED" + - "WITH_ISSUES" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images from Ad Creative" + description: + "Set to active if you want to fetch the thumbnail_url and store\ + \ the result in thumbnail_data_url for each Ad Creative." + default: false + order: 7 + type: "boolean" + custom_insights: + title: "Custom Insights" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." + order: 8 + type: "array" + items: + title: "InsightConfig" + description: "Config for custom insights" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + level: + title: "Level" + description: "Chosen level for API" + default: "ad" + enum: + - "ad" + - "adset" + - "campaign" + - "account" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "account_currency" + - "account_id" + - "account_name" + - "action_values" + - "actions" + - "ad_click_actions" + - "ad_id" + - "ad_impression_actions" + - "ad_name" + - "adset_end" + - "adset_id" + - "adset_name" + - "age_targeting" + - "attribution_setting" + - "auction_bid" + - "auction_competitiveness" + - "auction_max_competitor_bid" + - "buying_type" + - "campaign_id" + - "campaign_name" + - "canvas_avg_view_percent" + - "canvas_avg_view_time" + - "catalog_segment_actions" + - "catalog_segment_value" + - "catalog_segment_value_mobile_purchase_roas" + - "catalog_segment_value_omni_purchase_roas" + - "catalog_segment_value_website_purchase_roas" + - "clicks" + - "conversion_rate_ranking" + - "conversion_values" + - "conversions" + - "converted_product_quantity" + - "converted_product_value" + - "cost_per_15_sec_video_view" + - "cost_per_2_sec_continuous_video_view" + - "cost_per_action_type" + - "cost_per_ad_click" + - "cost_per_conversion" + - "cost_per_dda_countby_convs" + - "cost_per_estimated_ad_recallers" + - "cost_per_inline_link_click" + - "cost_per_inline_post_engagement" + - "cost_per_one_thousand_ad_impression" + - "cost_per_outbound_click" + - "cost_per_thruplay" + - "cost_per_unique_action_type" + - "cost_per_unique_click" + - "cost_per_unique_conversion" + - "cost_per_unique_inline_link_click" + - "cost_per_unique_outbound_click" + - "cpc" + - "cpm" + - "cpp" + - "created_time" + - "creative_media_type" + - "ctr" + - "date_start" + - "date_stop" + - "dda_countby_convs" + - "dda_results" + - "engagement_rate_ranking" + - "estimated_ad_recall_rate" + - "estimated_ad_recall_rate_lower_bound" + - "estimated_ad_recall_rate_upper_bound" + - "estimated_ad_recallers" + - "estimated_ad_recallers_lower_bound" + - "estimated_ad_recallers_upper_bound" + - "frequency" + - "full_view_impressions" + - "full_view_reach" + - "gender_targeting" + - "impressions" + - "inline_link_click_ctr" + - "inline_link_clicks" + - "inline_post_engagement" + - 
"instagram_upcoming_event_reminders_set" + - "instant_experience_clicks_to_open" + - "instant_experience_clicks_to_start" + - "instant_experience_outbound_clicks" + - "interactive_component_tap" + - "labels" + - "location" + - "marketing_messages_cost_per_delivered" + - "marketing_messages_cost_per_link_btn_click" + - "marketing_messages_spend" + - "mobile_app_purchase_roas" + - "objective" + - "optimization_goal" + - "outbound_clicks" + - "outbound_clicks_ctr" + - "place_page_name" + - "purchase_roas" + - "qualifying_question_qualify_answer_rate" + - "quality_ranking" + - "reach" + - "social_spend" + - "spend" + - "total_postbacks" + - "total_postbacks_detailed" + - "total_postbacks_detailed_v4" + - "unique_actions" + - "unique_clicks" + - "unique_conversions" + - "unique_ctr" + - "unique_inline_link_click_ctr" + - "unique_inline_link_clicks" + - "unique_link_clicks_ctr" + - "unique_outbound_clicks" + - "unique_outbound_clicks_ctr" + - "unique_video_continuous_2_sec_watched_actions" + - "unique_video_view_15_sec" + - "updated_time" + - "video_15_sec_watched_actions" + - "video_30_sec_watched_actions" + - "video_avg_time_watched_actions" + - "video_continuous_2_sec_watched_actions" + - "video_p100_watched_actions" + - "video_p25_watched_actions" + - "video_p50_watched_actions" + - "video_p75_watched_actions" + - "video_p95_watched_actions" + - "video_play_actions" + - "video_play_curve_actions" + - "video_play_retention_0_to_15s_actions" + - "video_play_retention_20_to_60s_actions" + - "video_play_retention_graph_actions" + - "video_thruplay_watched_actions" + - "video_time_watched_actions" + - "website_ctr" + - "website_purchase_roas" + - "wish_bid" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + title: "ValidBreakdowns" + description: "An enumeration." 
+ enum: + - "ad_format_asset" + - "age" + - "app_id" + - "body_asset" + - "call_to_action_asset" + - "coarse_conversion_value" + - "country" + - "description_asset" + - "device_platform" + - "dma" + - "fidelity_type" + - "frequency_value" + - "gender" + - "hourly_stats_aggregated_by_advertiser_time_zone" + - "hourly_stats_aggregated_by_audience_time_zone" + - "hsid" + - "image_asset" + - "impression_device" + - "is_conversion_id_modeled" + - "landing_destination" + - "link_url_asset" + - "marketing_messages_btn_name" + - "mdsa_landing_destination" + - "media_asset_url" + - "media_creator" + - "media_destination_url" + - "media_format" + - "media_origin_url" + - "media_text_content" + - "mmm" + - "place_page_id" + - "platform_position" + - "postback_sequence_index" + - "product_id" + - "publisher_platform" + - "redownload" + - "region" + - "skan_campaign_id" + - "skan_conversion_id" + - "skan_version" + - "standard_event_content_type" + - "title_asset" + - "video_asset" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + title: "ValidActionBreakdowns" + description: "An enumeration." + enum: + - "action_canvas_component_name" + - "action_carousel_card_id" + - "action_carousel_card_name" + - "action_destination" + - "action_device" + - "action_reaction" + - "action_target_id" + - "action_type" + - "action_video_sound" + - "action_video_type" + - "standard_event_content_type" + action_report_time: + title: "Action Report Time" + description: + "Determines the report time of action stats. For example,\ + \ if a person saw the ad on Jan 1st but converted on Jan 2nd, when\ + \ you query the API with action_report_time=impression, you see\ + \ a conversion on Jan 1st. When you query the API with action_report_time=conversion,\ + \ you see a conversion on Jan 2nd." 
+ default: "mixed" + enum: + - "conversion" + - "impression" + - "mixed" + type: "string" + time_increment: + title: "Time Increment" + description: + "Time window in days by which to aggregate statistics.\ + \ The sync will be chunked into N day intervals, where N is the\ + \ number of days you specified. For example, if you set this value\ + \ to 7, then all statistics will be reported as 7-day aggregates\ + \ by starting from the start_date. If the start and end dates are\ + \ October 1st and October 30th, then the connector will output 5\ + \ records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days\ + \ only). The minimum allowed value for this field is 1, and the\ + \ maximum is 89." + default: 1 + maximum: 89 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not\ + \ setting this option will result in always syncing the latest data." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + insights_lookback_window: + title: "Custom Insights Lookback Window" + description: "The attribution window" + default: 28 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Custom Insights Job Timeout" + description: "The insights job timeout" + default: 60 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + page_size: + title: "Page Size of Requests" + description: + "Page size used when sending requests to Facebook API to specify\ + \ number of records per page when response has pagination. Most users\ + \ do not need to set this field unless they specifically need to tune\ + \ the connector to address specific issues or use cases." + default: 100 + order: 10 + exclusiveMinimum: 0 + type: "integer" + insights_lookback_window: + title: "Insights Lookback Window" + description: + "The attribution window. Facebook freezes insight data 28 days\ + \ after it was generated, which means that all data from the past 28 days\ + \ may have changed since we last emitted it, so you can retrieve refreshed\ + \ insights from the past by setting this parameter. If you set a custom\ + \ lookback window value in Facebook account, please provide the same value\ + \ here." + default: 28 + order: 11 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Insights Job Timeout" + description: + "Insights Job Timeout establishes the maximum amount of time\ + \ (in minutes) of waiting for the report job to complete. When timeout\ + \ is reached the job is considered failed and we are trying to request\ + \ smaller amount of data by breaking the job to few smaller ones. 
If you\ + \ definitely know that 60 minutes is not enough for your report to be\ + \ processed then you can decrease the timeout value, so we start breaking\ + \ job to smaller parts faster." + default: 60 + order: 12 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + sourceType: + title: "facebook-marketing" + const: "facebook-marketing" + enum: + - "facebook-marketing" + order: 0 + type: "string" + required: + - "account_ids" + - "credentials" + - "sourceType" + source-facebook-marketing-update: + title: "Source Facebook Marketing" + type: "object" + properties: + account_ids: + title: "Ad Account ID(s)" + description: + "The Facebook Ad account ID(s) to pull data from. The Ad account\ + \ ID number is in the account dropdown menu or in your browser's address\ + \ bar of your Meta Ads Manager. See the docs for more information." + order: 0 + pattern_descriptor: "The Ad Account ID must be a number." + examples: + - "111111111111111" + minItems: 1 + type: "array" + items: + type: "string" + pattern: "^[0-9]+$" + uniqueItems: true + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’s Dashboard,\ + \ click on \"Marketing API\" then \"Tools\". Select permissions ads_management,\ + \ ads_read, read_insights, business_management. Then click on \"Get\ + \ token\". See the docs for more information." 
+ order: 1 + airbyte_secret: true + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Facebook Marketing API" + type: "object" + oneOf: + - title: "Authenticate via Facebook Marketing (Oauth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Facebook Marketing API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Facebook Marketing API" + airbyte_secret: true + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + required: + - "client_id" + - "client_secret" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + access_token: + title: "Access Token" + description: + "The value of the generated access token. From your App’\ + s Dashboard, click on \"Marketing API\" then \"Tools\". Select permissions\ + \ ads_management, ads_read, read_insights, business_management.\ + \ Then click on \"Get token\". See the docs for more information." + airbyte_secret: true + type: "string" + required: + - "access_token" + - "auth_type" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for all incremental\ + \ streams, in the format YYYY-MM-DDT00:00:00Z. If not set then all data\ + \ will be replicated for usual streams and only last 2 years for insight\ + \ streams." 
+ order: 2 + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for all\ + \ incremental streams, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not setting\ + \ this option will result in always syncing the latest data." + order: 3 + pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + campaign_statuses: + title: "Campaign Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 4 + type: "array" + items: + title: "ValidCampaignStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + adset_statuses: + title: "AdSet Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 5 + type: "array" + items: + title: "ValidAdSetStatuses" + description: "An enumeration." + enum: + - "ACTIVE" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "IN_PROCESS" + - "PAUSED" + - "WITH_ISSUES" + ad_statuses: + title: "Ad Statuses" + description: + "Select the statuses you want to be loaded in the stream. If\ + \ no specific statuses are selected, the API's default behavior applies,\ + \ and some statuses may be filtered out." + default: [] + order: 6 + type: "array" + items: + title: "ValidAdStatuses" + description: "An enumeration." 
+ enum: + - "ACTIVE" + - "ADSET_PAUSED" + - "ARCHIVED" + - "CAMPAIGN_PAUSED" + - "DELETED" + - "DISAPPROVED" + - "IN_PROCESS" + - "PAUSED" + - "PENDING_BILLING_INFO" + - "PENDING_REVIEW" + - "PREAPPROVED" + - "WITH_ISSUES" + fetch_thumbnail_images: + title: "Fetch Thumbnail Images from Ad Creative" + description: + "Set to active if you want to fetch the thumbnail_url and store\ + \ the result in thumbnail_data_url for each Ad Creative." + default: false + order: 7 + type: "boolean" + custom_insights: + title: "Custom Insights" + description: + "A list which contains ad statistics entries, each entry must\ + \ have a name and can contains fields, breakdowns or action_breakdowns.\ + \ Click on \"add\" to fill this field." + order: 8 + type: "array" + items: + title: "InsightConfig" + description: "Config for custom insights" + type: "object" + properties: + name: + title: "Name" + description: "The name value of insight" + type: "string" + level: + title: "Level" + description: "Chosen level for API" + default: "ad" + enum: + - "ad" + - "adset" + - "campaign" + - "account" + type: "string" + fields: + title: "Fields" + description: "A list of chosen fields for fields parameter" + default: [] + type: "array" + items: + title: "ValidEnums" + description: "An enumeration." 
+ enum: + - "account_currency" + - "account_id" + - "account_name" + - "action_values" + - "actions" + - "ad_click_actions" + - "ad_id" + - "ad_impression_actions" + - "ad_name" + - "adset_end" + - "adset_id" + - "adset_name" + - "age_targeting" + - "attribution_setting" + - "auction_bid" + - "auction_competitiveness" + - "auction_max_competitor_bid" + - "buying_type" + - "campaign_id" + - "campaign_name" + - "canvas_avg_view_percent" + - "canvas_avg_view_time" + - "catalog_segment_actions" + - "catalog_segment_value" + - "catalog_segment_value_mobile_purchase_roas" + - "catalog_segment_value_omni_purchase_roas" + - "catalog_segment_value_website_purchase_roas" + - "clicks" + - "conversion_rate_ranking" + - "conversion_values" + - "conversions" + - "converted_product_quantity" + - "converted_product_value" + - "cost_per_15_sec_video_view" + - "cost_per_2_sec_continuous_video_view" + - "cost_per_action_type" + - "cost_per_ad_click" + - "cost_per_conversion" + - "cost_per_dda_countby_convs" + - "cost_per_estimated_ad_recallers" + - "cost_per_inline_link_click" + - "cost_per_inline_post_engagement" + - "cost_per_one_thousand_ad_impression" + - "cost_per_outbound_click" + - "cost_per_thruplay" + - "cost_per_unique_action_type" + - "cost_per_unique_click" + - "cost_per_unique_conversion" + - "cost_per_unique_inline_link_click" + - "cost_per_unique_outbound_click" + - "cpc" + - "cpm" + - "cpp" + - "created_time" + - "creative_media_type" + - "ctr" + - "date_start" + - "date_stop" + - "dda_countby_convs" + - "dda_results" + - "engagement_rate_ranking" + - "estimated_ad_recall_rate" + - "estimated_ad_recall_rate_lower_bound" + - "estimated_ad_recall_rate_upper_bound" + - "estimated_ad_recallers" + - "estimated_ad_recallers_lower_bound" + - "estimated_ad_recallers_upper_bound" + - "frequency" + - "full_view_impressions" + - "full_view_reach" + - "gender_targeting" + - "impressions" + - "inline_link_click_ctr" + - "inline_link_clicks" + - "inline_post_engagement" + - 
"instagram_upcoming_event_reminders_set" + - "instant_experience_clicks_to_open" + - "instant_experience_clicks_to_start" + - "instant_experience_outbound_clicks" + - "interactive_component_tap" + - "labels" + - "location" + - "marketing_messages_cost_per_delivered" + - "marketing_messages_cost_per_link_btn_click" + - "marketing_messages_spend" + - "mobile_app_purchase_roas" + - "objective" + - "optimization_goal" + - "outbound_clicks" + - "outbound_clicks_ctr" + - "place_page_name" + - "purchase_roas" + - "qualifying_question_qualify_answer_rate" + - "quality_ranking" + - "reach" + - "social_spend" + - "spend" + - "total_postbacks" + - "total_postbacks_detailed" + - "total_postbacks_detailed_v4" + - "unique_actions" + - "unique_clicks" + - "unique_conversions" + - "unique_ctr" + - "unique_inline_link_click_ctr" + - "unique_inline_link_clicks" + - "unique_link_clicks_ctr" + - "unique_outbound_clicks" + - "unique_outbound_clicks_ctr" + - "unique_video_continuous_2_sec_watched_actions" + - "unique_video_view_15_sec" + - "updated_time" + - "video_15_sec_watched_actions" + - "video_30_sec_watched_actions" + - "video_avg_time_watched_actions" + - "video_continuous_2_sec_watched_actions" + - "video_p100_watched_actions" + - "video_p25_watched_actions" + - "video_p50_watched_actions" + - "video_p75_watched_actions" + - "video_p95_watched_actions" + - "video_play_actions" + - "video_play_curve_actions" + - "video_play_retention_0_to_15s_actions" + - "video_play_retention_20_to_60s_actions" + - "video_play_retention_graph_actions" + - "video_thruplay_watched_actions" + - "video_time_watched_actions" + - "website_ctr" + - "website_purchase_roas" + - "wish_bid" + breakdowns: + title: "Breakdowns" + description: "A list of chosen breakdowns for breakdowns" + default: [] + type: "array" + items: + title: "ValidBreakdowns" + description: "An enumeration." 
+ enum: + - "ad_format_asset" + - "age" + - "app_id" + - "body_asset" + - "call_to_action_asset" + - "coarse_conversion_value" + - "country" + - "description_asset" + - "device_platform" + - "dma" + - "fidelity_type" + - "frequency_value" + - "gender" + - "hourly_stats_aggregated_by_advertiser_time_zone" + - "hourly_stats_aggregated_by_audience_time_zone" + - "hsid" + - "image_asset" + - "impression_device" + - "is_conversion_id_modeled" + - "landing_destination" + - "link_url_asset" + - "marketing_messages_btn_name" + - "mdsa_landing_destination" + - "media_asset_url" + - "media_creator" + - "media_destination_url" + - "media_format" + - "media_origin_url" + - "media_text_content" + - "mmm" + - "place_page_id" + - "platform_position" + - "postback_sequence_index" + - "product_id" + - "publisher_platform" + - "redownload" + - "region" + - "skan_campaign_id" + - "skan_conversion_id" + - "skan_version" + - "standard_event_content_type" + - "title_asset" + - "video_asset" + action_breakdowns: + title: "Action Breakdowns" + description: "A list of chosen action_breakdowns for action_breakdowns" + default: [] + type: "array" + items: + title: "ValidActionBreakdowns" + description: "An enumeration." + enum: + - "action_canvas_component_name" + - "action_carousel_card_id" + - "action_carousel_card_name" + - "action_destination" + - "action_device" + - "action_reaction" + - "action_target_id" + - "action_type" + - "action_video_sound" + - "action_video_type" + - "standard_event_content_type" + action_report_time: + title: "Action Report Time" + description: + "Determines the report time of action stats. For example,\ + \ if a person saw the ad on Jan 1st but converted on Jan 2nd, when\ + \ you query the API with action_report_time=impression, you see\ + \ a conversion on Jan 1st. When you query the API with action_report_time=conversion,\ + \ you see a conversion on Jan 2nd." 
+ default: "mixed" + enum: + - "conversion" + - "impression" + - "mixed" + type: "string" + time_increment: + title: "Time Increment" + description: + "Time window in days by which to aggregate statistics.\ + \ The sync will be chunked into N day intervals, where N is the\ + \ number of days you specified. For example, if you set this value\ + \ to 7, then all statistics will be reported as 7-day aggregates\ + \ by starting from the start_date. If the start and end dates are\ + \ October 1st and October 30th, then the connector will output 5\ + \ records: 01 - 06, 07 - 13, 14 - 20, 21 - 27, and 28 - 30 (3 days\ + \ only). The minimum allowed value for this field is 1, and the\ + \ maximum is 89." + default: 1 + maximum: 89 + minimum: 1 + exclusiveMinimum: 0 + type: "integer" + start_date: + title: "Start Date" + description: + "The date from which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-25T00:00:00Z" + type: "string" + format: "date-time" + end_date: + title: "End Date" + description: + "The date until which you'd like to replicate data for\ + \ this stream, in the format YYYY-MM-DDT00:00:00Z. All data generated\ + \ between the start date and this end date will be replicated. Not\ + \ setting this option will result in always syncing the latest data." 
+ pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2017-01-26T00:00:00Z" + type: "string" + format: "date-time" + insights_lookback_window: + title: "Custom Insights Lookback Window" + description: "The attribution window" + default: 28 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Custom Insights Job Timeout" + description: "The insights job timeout" + default: 60 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + page_size: + title: "Page Size of Requests" + description: + "Page size used when sending requests to Facebook API to specify\ + \ number of records per page when response has pagination. Most users\ + \ do not need to set this field unless they specifically need to tune\ + \ the connector to address specific issues or use cases." + default: 100 + order: 10 + exclusiveMinimum: 0 + type: "integer" + insights_lookback_window: + title: "Insights Lookback Window" + description: + "The attribution window. Facebook freezes insight data 28 days\ + \ after it was generated, which means that all data from the past 28 days\ + \ may have changed since we last emitted it, so you can retrieve refreshed\ + \ insights from the past by setting this parameter. If you set a custom\ + \ lookback window value in Facebook account, please provide the same value\ + \ here." + default: 28 + order: 11 + maximum: 28 + mininum: 1 + exclusiveMinimum: 0 + type: "integer" + insights_job_timeout: + title: "Insights Job Timeout" + description: + "Insights Job Timeout establishes the maximum amount of time\ + \ (in minutes) of waiting for the report job to complete. When timeout\ + \ is reached the job is considered failed and we are trying to request\ + \ smaller amount of data by breaking the job to few smaller ones. 
If you\ + \ definitely know that 60 minutes is not enough for your report to be\ + \ processed then you can decrease the timeout value, so we start breaking\ + \ job to smaller parts faster." + default: 60 + order: 12 + maximum: 60 + mininum: 10 + exclusiveMinimum: 0 + type: "integer" + required: + - "account_ids" + - "credentials" + source-recruitee: + type: "object" + required: + - "api_key" + - "company_id" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recruitee API Key. See here." + order: 0 + x-speakeasy-param-sensitive: true + company_id: + type: "integer" + title: "Company ID" + description: + "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." + order: 1 + sourceType: + title: "recruitee" + const: "recruitee" + enum: + - "recruitee" + order: 0 + type: "string" + source-recruitee-update: + type: "object" + required: + - "api_key" + - "company_id" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Recruitee API Key. See here." + order: 0 + company_id: + type: "integer" + title: "Company ID" + description: + "Recruitee Company ID. You can also find this ID on the Recruitee API\ + \ tokens page." 
+ order: 1 + source-airbyte: + type: "object" + required: + - "start_date" + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 1 + title: "client_id" + start_date: + type: "string" + order: 0 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_secret: + type: "string" + order: 2 + title: "client_secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "airbyte" + const: "airbyte" + enum: + - "airbyte" + order: 0 + type: "string" + source-airbyte-update: + type: "object" + required: + - "start_date" + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 1 + title: "client_id" + start_date: + type: "string" + order: 0 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_secret: + type: "string" + order: 2 + title: "client_secret" + airbyte_secret: true + source-survey-sparrow: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + description: + "Your access token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + region: + type: "object" + title: "Base URL" + description: + "Is your account location is EU based? If yes, the base url\ + \ to retrieve data will be different." 
+ oneOf: + - type: "object" + title: "EU-based account" + properties: + url_base: + type: "string" + const: "https://eu-api.surveysparrow.com/v3" + enum: + - "https://eu-api.surveysparrow.com/v3" + - type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + default: + type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + order: 1 + survey_id: + type: "array" + description: "A List of your survey ids for survey-specific stream" + order: 2 + sourceType: + title: "survey-sparrow" + const: "survey-sparrow" + enum: + - "survey-sparrow" + order: 0 + type: "string" + source-survey-sparrow-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + description: + "Your access token. See here. The key is case sensitive." + airbyte_secret: true + order: 0 + region: + type: "object" + title: "Base URL" + description: + "Is your account location is EU based? If yes, the base url\ + \ to retrieve data will be different." 
+ oneOf: + - type: "object" + title: "EU-based account" + properties: + url_base: + type: "string" + const: "https://eu-api.surveysparrow.com/v3" + enum: + - "https://eu-api.surveysparrow.com/v3" + - type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + default: + type: "object" + title: "Global account" + properties: + url_base: + type: "string" + const: "https://api.surveysparrow.com/v3" + enum: + - "https://api.surveysparrow.com/v3" + order: 1 + survey_id: + type: "array" + description: "A List of your survey ids for survey-specific stream" + order: 2 + source-azure-table: + title: "Azure Data Table Spec" + type: "object" + required: + - "storage_account_name" + - "storage_access_key" + - "sourceType" + properties: + storage_account_name: + title: "Account Name" + type: "string" + description: "The name of your storage account." + order: 0 + airbyte_secret: false + x-speakeasy-param-sensitive: true + storage_access_key: + title: "Access Key" + type: "string" + description: + "Azure Table Storage Access Key. See the docs for more information on how to obtain this key." + order: 1 + airbyte_secret: true + x-speakeasy-param-sensitive: true + storage_endpoint_suffix: + title: "Endpoint Suffix" + type: "string" + description: + "Azure Table Storage service account URL suffix. 
See the docs\ + \ for more information on how to obtain endpoint suffix" + order: 2 + default: "core.windows.net" + examples: + - "core.windows.net" + - "core.chinacloudapi.cn" + airbyte_secret: false + x-speakeasy-param-sensitive: true + sourceType: + title: "azure-table" + const: "azure-table" + enum: + - "azure-table" + order: 0 + type: "string" + source-azure-table-update: + title: "Azure Data Table Spec" + type: "object" + required: + - "storage_account_name" + - "storage_access_key" + properties: + storage_account_name: + title: "Account Name" + type: "string" + description: "The name of your storage account." + order: 0 + airbyte_secret: false + storage_access_key: + title: "Access Key" + type: "string" + description: + "Azure Table Storage Access Key. See the docs for more information on how to obtain this key." + order: 1 + airbyte_secret: true + storage_endpoint_suffix: + title: "Endpoint Suffix" + type: "string" + description: + "Azure Table Storage service account URL suffix. 
See the docs\ + \ for more information on how to obtain endpoint suffix" + order: 2 + default: "core.windows.net" + examples: + - "core.windows.net" + - "core.chinacloudapi.cn" + airbyte_secret: false + source-customer-io: + type: "object" + required: + - "app_api_key" + - "sourceType" + properties: + app_api_key: + type: "string" + title: "Customer.io App API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "customer-io" + const: "customer-io" + enum: + - "customer-io" + order: 0 + type: "string" + source-customer-io-update: + type: "object" + required: + - "app_api_key" + properties: + app_api_key: + type: "string" + title: "Customer.io App API Key" + airbyte_secret: true + order: 0 + source-surveymonkey: + type: "object" + required: + - "start_date" + - "credentials" + - "sourceType" + properties: + origin: + type: "string" + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: + "Depending on the originating datacenter of the SurveyMonkey\ + \ account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: "object" + required: + - "auth_method" + - "access_token" + order: 2 + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." 
+ airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + order: 3 + type: "string" + airbyte_secret: true + description: + "Access Token for making authenticated requests. See the\ + \ docs for information on how to generate this key." + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + order: 3 + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: "array" + order: 1000 + items: + type: "string" + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all boards to which you have access will be\ + \ replicated." + sourceType: + title: "surveymonkey" + const: "surveymonkey" + enum: + - "surveymonkey" + order: 0 + type: "string" + source-surveymonkey-update: + type: "object" + required: + - "start_date" + - "credentials" + properties: + origin: + type: "string" + order: 1 + enum: + - "USA" + - "Europe" + - "Canada" + default: "USA" + title: "Origin datacenter of the SurveyMonkey account" + description: + "Depending on the originating datacenter of the SurveyMonkey\ + \ account, the API access URL may be different." + credentials: + title: "SurveyMonkey Authorization Method" + description: "The authorization method to use to retrieve data from SurveyMonkey" + type: "object" + required: + - "auth_method" + - "access_token" + order: 2 + properties: + auth_method: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." 
+ airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + access_token: + title: "Access Token" + order: 3 + type: "string" + airbyte_secret: true + description: + "Access Token for making authenticated requests. See the\ + \ docs for information on how to generate this key." + start_date: + title: "Start Date" + order: 3 + type: "string" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z?$" + examples: + - "2021-01-01T00:00:00Z" + format: "date-time" + survey_ids: + type: "array" + order: 1000 + items: + type: "string" + pattern: "^[0-9]{8,9}$" + title: "Survey Monkey survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all boards to which you have access will be\ + \ replicated." + source-persistiq: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "PersistIq API Key. See the docs for more information on where to find that key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "persistiq" + const: "persistiq" + enum: + - "persistiq" + order: 0 + type: "string" + source-persistiq-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "PersistIq API Key. See the docs for more information on where to find that key." + airbyte_secret: true + order: 0 + source-configcat: + type: "object" + required: + - "username" + - "password" + - "sourceType" + properties: + username: + type: "string" + description: + "Basic auth user name. See here." + title: "Username" + order: 0 + password: + type: "string" + description: + "Basic auth password. See here." 
+ title: "Password" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "configcat" + const: "configcat" + enum: + - "configcat" + order: 0 + type: "string" + source-configcat-update: + type: "object" + required: + - "username" + - "password" + properties: + username: + type: "string" + description: + "Basic auth user name. See here." + title: "Username" + order: 0 + password: + type: "string" + description: + "Basic auth password. See here." + title: "Password" + airbyte_secret: true + order: 1 + source-reddit: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + query: + type: "string" + description: "Specifies the query for searching in reddits and subreddits" + order: 1 + title: "Query" + default: "airbyte" + include_over_18: + type: "boolean" + description: "Includes mature content" + order: 2 + title: "Include over 18 flag" + default: false + exact: + type: "boolean" + description: "Specifies exact keyword and reduces distractions" + order: 3 + title: "Exact" + limit: + type: "number" + description: "Max records per page limit" + order: 4 + title: "Limit" + default: "1000" + subreddits: + type: "array" + description: "Subreddits for exploration" + order: 5 + title: "Subreddits" + default: + - "r/funny" + - "r/AskReddit" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "reddit" + const: "reddit" + enum: + - "reddit" + order: 0 + type: "string" + source-reddit-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + query: + type: "string" + description: "Specifies the query for searching in reddits and subreddits" + order: 1 + 
title: "Query" + default: "airbyte" + include_over_18: + type: "boolean" + description: "Includes mature content" + order: 2 + title: "Include over 18 flag" + default: false + exact: + type: "boolean" + description: "Specifies exact keyword and reduces distractions" + order: 3 + title: "Exact" + limit: + type: "number" + description: "Max records per page limit" + order: 4 + title: "Limit" + default: "1000" + subreddits: + type: "array" + description: "Subreddits for exploration" + order: 5 + title: "Subreddits" + default: + - "r/funny" + - "r/AskReddit" + start_date: + type: "string" + order: 6 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-insightly: + type: "object" + required: + - "start_date" + - "token" + - "sourceType" + properties: + start_date: + type: + - "string" + - "null" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Insightly\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. Note that it will be used only for incremental streams." + examples: + - "2021-03-01T00:00:00Z" + order: 0 + token: + type: + - "string" + - "null" + title: "API Token" + description: "Your Insightly API token." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + sourceType: + title: "insightly" + const: "insightly" + enum: + - "insightly" + order: 0 + type: "string" + source-insightly-update: + type: "object" + required: + - "start_date" + - "token" + properties: + start_date: + type: + - "string" + - "null" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "The date from which you'd like to replicate data for Insightly\ + \ in the format YYYY-MM-DDT00:00:00Z. All data generated after this date\ + \ will be replicated. 
Note that it will be used only for incremental streams." + examples: + - "2021-03-01T00:00:00Z" + order: 0 + token: + type: + - "string" + - "null" + title: "API Token" + description: "Your Insightly API token." + airbyte_secret: true + order: 1 + source-cart: + title: "Cart.com Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Central API Router" + type: "object" + order: 0 + required: + - "auth_type" + - "user_name" + - "user_secret" + - "site_id" + properties: + auth_type: + type: "string" + const: "CENTRAL_API_ROUTER" + order: 0 + enum: + - "CENTRAL_API_ROUTER" + user_name: + type: "string" + title: "User Name" + description: "Enter your application's User Name" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + user_secret: + type: "string" + title: "User Secret" + description: "Enter your application's User Secret" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + site_id: + type: "string" + title: "Site ID" + description: + "You can determine a site provisioning site Id by hitting\ + \ https://site.com/store/sitemonitor.aspx and reading the response\ + \ param PSID" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + - title: "Single Store Access Token" + type: "object" + order: 1 + required: + - "auth_type" + - "access_token" + - "store_name" + properties: + auth_type: + type: "string" + const: "SINGLE_STORE_ACCESS_TOKEN" + order: 0 + enum: + - "SINGLE_STORE_ACCESS_TOKEN" + access_token: + type: "string" + title: "Access Token" + airbyte_secret: true + order: 1 + description: "Access Token for making authenticated requests." + x-speakeasy-param-sensitive: true + store_name: + type: "string" + title: "Store Name" + order: 2 + description: + "The name of Cart.com Online Store. 
All API URLs start\ + \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\ + \ is the domain name of your store." + start_date: + title: "Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + sourceType: + title: "cart" + const: "cart" + enum: + - "cart" + order: 0 + type: "string" + source-cart-update: + title: "Cart.com Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Central API Router" + type: "object" + order: 0 + required: + - "auth_type" + - "user_name" + - "user_secret" + - "site_id" + properties: + auth_type: + type: "string" + const: "CENTRAL_API_ROUTER" + order: 0 + enum: + - "CENTRAL_API_ROUTER" + user_name: + type: "string" + title: "User Name" + description: "Enter your application's User Name" + airbyte_secret: true + order: 1 + user_secret: + type: "string" + title: "User Secret" + description: "Enter your application's User Secret" + airbyte_secret: true + order: 2 + site_id: + type: "string" + title: "Site ID" + description: + "You can determine a site provisioning site Id by hitting\ + \ https://site.com/store/sitemonitor.aspx and reading the response\ + \ param PSID" + airbyte_secret: true + order: 3 + - title: "Single Store Access Token" + type: "object" + order: 1 + required: + - "auth_type" + - "access_token" + - "store_name" + properties: + auth_type: + type: "string" + const: "SINGLE_STORE_ACCESS_TOKEN" + order: 0 + enum: + - "SINGLE_STORE_ACCESS_TOKEN" + access_token: + type: "string" + title: "Access Token" + airbyte_secret: true + order: 1 + description: "Access Token for making authenticated requests." + store_name: + type: "string" + title: "Store Name" + order: 2 + description: + "The name of Cart.com Online Store. 
All API URLs start\ + \ with https://[mystorename.com]/api/v1/, where [mystorename.com]\ + \ is the domain name of your store." + start_date: + title: "Start Date" + type: "string" + description: "The date from which you'd like to replicate the data" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + source-oracle: + title: "Oracle Source Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sourceType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 1 + port: + title: "Port" + description: + "Port of the database.\nOracle Corporations recommends the\ + \ following port numbers:\n1521 - Default listening port for client connections\ + \ to the listener. \n2484 - Recommended and officially registered listening\ + \ port for client connections to the listener using TCP/IP with SSL" + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + order: 2 + connection_data: + title: "Connect by" + type: "object" + description: "Connect data that will be used for DB connection" + order: 3 + oneOf: + - title: "Service name" + description: "Use service name" + required: + - "service_name" + properties: + connection_type: + type: "string" + const: "service_name" + order: 0 + enum: + - "service_name" + service_name: + title: "Service name" + type: "string" + order: 1 + - title: "System ID (SID)" + description: "Use SID (Oracle System Identifier)" + required: + - "sid" + properties: + connection_type: + type: "string" + const: "sid" + order: 0 + enum: + - "sid" + sid: + title: "System ID (SID)" + type: "string" + order: 1 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 1 + uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method with is used when communicating with\ + \ the database." + order: 8 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines what encryption algorithm is\ + \ used." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." 
+ required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM File" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "oracle" + const: "oracle" + enum: + - "oracle" + order: 0 + type: "string" + source-oracle-update: + title: "Oracle Source Spec" + type: "object" + required: + - "host" + - "port" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 1 + port: + title: "Port" + description: + "Port of the database.\nOracle Corporations recommends the\ + \ following port numbers:\n1521 - Default listening port for client connections\ + \ to the listener. 
\n2484 - Recommended and officially registered listening\ + \ port for client connections to the listener using TCP/IP with SSL" + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + order: 2 + connection_data: + title: "Connect by" + type: "object" + description: "Connect data that will be used for DB connection" + order: 3 + oneOf: + - title: "Service name" + description: "Use service name" + required: + - "service_name" + properties: + connection_type: + type: "string" + const: "service_name" + order: 0 + enum: + - "service_name" + service_name: + title: "Service name" + type: "string" + order: 1 + - title: "System ID (SID)" + description: "Use SID (Oracle System Identifier)" + required: + - "sid" + properties: + connection_type: + type: "string" + const: "sid" + order: 0 + enum: + - "sid" + sid: + title: "System ID (SID)" + type: "string" + order: 1 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 1 + uniqueItems: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method with is used when communicating with\ + \ the database." + order: 8 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + encryption_algorithm: + type: "string" + description: + "This parameter defines what encryption algorithm is\ + \ used." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM File" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + order: 4 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-appfollow: + type: "object" + required: + - "sourceType" + properties: + api_secret: + type: "string" + description: "API Key provided by Appfollow" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "appfollow" + const: "appfollow" + enum: + - "appfollow" + order: 0 + type: "string" + source-appfollow-update: + type: "object" + required: [] + properties: + api_secret: + type: "string" + description: "API Key provided by Appfollow" + title: "API Key" + airbyte_secret: true + order: 0 + source-chartmogul: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your Chartmogul API key. See the docs for info on how to obtain this." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. When\ + \ feasible, any data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + order: 1 + format: "date-time" + sourceType: + title: "chartmogul" + const: "chartmogul" + enum: + - "chartmogul" + order: 0 + type: "string" + source-chartmogul-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API key" + description: + "Your Chartmogul API key. See the docs for info on how to obtain this." + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. When\ + \ feasible, any data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + order: 1 + format: "date-time" + source-coinmarketcap: + type: "object" + required: + - "api_key" + - "data_type" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Your API Key. See here. The token is case sensitive." + order: 0 + x-speakeasy-param-sensitive: true + data_type: + type: "string" + title: "Data type" + enum: + - "latest" + - "historical" + description: + "/latest: Latest market ticker quotes and averages for cryptocurrencies\ + \ and exchanges. /historical: Intervals of historic market data like OHLCV\ + \ data or data for use in charting libraries. See here." + order: 1 + symbols: + type: "array" + title: "Symbol" + items: + type: "string" + description: "Cryptocurrency symbols. (only used for quotes stream)" + minItems: 1 + examples: + - "AVAX" + - "BTC" + order: 2 + sourceType: + title: "coinmarketcap" + const: "coinmarketcap" + enum: + - "coinmarketcap" + order: 0 + type: "string" + source-coinmarketcap-update: + type: "object" + required: + - "api_key" + - "data_type" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + description: + "Your API Key. See here. The token is case sensitive." 
+ order: 0 + data_type: + type: "string" + title: "Data type" + enum: + - "latest" + - "historical" + description: + "/latest: Latest market ticker quotes and averages for cryptocurrencies\ + \ and exchanges. /historical: Intervals of historic market data like OHLCV\ + \ data or data for use in charting libraries. See here." + order: 1 + symbols: + type: "array" + title: "Symbol" + items: + type: "string" + description: "Cryptocurrency symbols. (only used for quotes stream)" + minItems: 1 + examples: + - "AVAX" + - "BTC" + order: 2 + source-dixa: + type: "object" + required: + - "api_token" + - "start_date" + - "sourceType" + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The connector pulls records updated from this date onwards." + examples: + - "YYYY-MM-DD" + order: 3 + sourceType: + title: "dixa" + const: "dixa" + enum: + - "dixa" + order: 0 + type: "string" + source-dixa-update: + type: "object" + required: + - "api_token" + - "start_date" + properties: + api_token: + type: "string" + description: "Dixa API token" + airbyte_secret: true + order: 1 + batch_size: + type: "integer" + description: "Number of days to batch into one request. Max 31." + pattern: "^[0-9]{1,2}$" + examples: + - 1 + - 31 + default: 31 + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The connector pulls records updated from this date onwards." 
+ examples: + - "YYYY-MM-DD" + order: 3 + source-freshcaller: + title: "Freshcaller Spec" + type: "object" + required: + - "domain" + - "api_key" + - "sourceType" + properties: + domain: + type: "string" + title: "Domain for Freshcaller account" + description: "Used to construct Base URL for the Freshcaller APIs" + examples: + - "snaptravel" + api_key: + type: "string" + title: "API Key" + description: + "Freshcaller API Key. See the docs for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + description: + "UTC date and time. Any data created after this date will be\ + \ replicated." + format: "date-time" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-01-01T12:00:00Z" + sync_lag_minutes: + title: "Lag in minutes for each sync" + type: "integer" + description: + "Lag in minutes for each sync, i.e., at time T, data for the\ + \ time range [prev_sync_time, T-30] will be fetched" + sourceType: + title: "freshcaller" + const: "freshcaller" + enum: + - "freshcaller" + order: 0 + type: "string" + source-freshcaller-update: + title: "Freshcaller Spec" + type: "object" + required: + - "domain" + - "api_key" + properties: + domain: + type: "string" + title: "Domain for Freshcaller account" + description: "Used to construct Base URL for the Freshcaller APIs" + examples: + - "snaptravel" + api_key: + type: "string" + title: "API Key" + description: + "Freshcaller API Key. See the docs for more information on how to obtain this key." 
+ airbyte_secret: true + requests_per_minute: + title: "Requests per minute" + type: "integer" + description: + "The number of requests per minute that this source allowed\ + \ to use. There is a rate limit of 50 requests per minute per app per\ + \ account." + start_date: + title: "Start Date" + description: + "UTC date and time. Any data created after this date will be\ + \ replicated." + format: "date-time" + type: "string" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2022-01-01T12:00:00Z" + sync_lag_minutes: + title: "Lag in minutes for each sync" + type: "integer" + description: + "Lag in minutes for each sync, i.e., at time T, data for the\ + \ time range [prev_sync_time, T-30] will be fetched" + source-recharge: + title: "Recharge Spec" + type: "object" + required: + - "start_date" + - "access_token" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Recharge\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will\ + \ not be replicated." + examples: + - "2021-05-14T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the Access Token generated. See the docs for\ + \ more information." + airbyte_secret: true + x-speakeasy-param-sensitive: true + use_orders_deprecated_api: + type: "boolean" + title: "Use `Orders` Deprecated API" + description: + "Define whether or not the `Orders` stream should use the deprecated\ + \ `2021-01` API version, or use `2021-11`, otherwise." 
+ default: true + sourceType: + title: "recharge" + const: "recharge" + enum: + - "recharge" + order: 0 + type: "string" + source-recharge-update: + title: "Recharge Spec" + type: "object" + required: + - "start_date" + - "access_token" + properties: + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Recharge\ + \ API, in the format YYYY-MM-DDT00:00:00Z. Any data before this date will\ + \ not be replicated." + examples: + - "2021-05-14T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + format: "date-time" + access_token: + type: "string" + title: "Access Token" + description: + "The value of the Access Token generated. See the docs for\ + \ more information." + airbyte_secret: true + use_orders_deprecated_api: + type: "boolean" + title: "Use `Orders` Deprecated API" + description: + "Define whether or not the `Orders` stream should use the deprecated\ + \ `2021-01` API version, or use `2021-11`, otherwise." 
+ default: true + source-aha: + type: "object" + required: + - "api_key" + - "url" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Bearer Token" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + order: 1 + sourceType: + title: "aha" + const: "aha" + enum: + - "aha" + order: 0 + type: "string" + source-aha-update: + type: "object" + required: + - "api_key" + - "url" + properties: + api_key: + type: "string" + title: "API Bearer Token" + airbyte_secret: true + description: "API Key" + order: 0 + url: + type: "string" + description: "URL" + title: "Aha Url Instance" + order: 1 + source-brevo: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "brevo" + const: "brevo" + enum: + - "brevo" + order: 0 + type: "string" + source-brevo-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-datascope: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "Authorization" + airbyte_secret: true + description: "API Key" + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} 
[0-9]{2}:[0-9]{2}$" + order: 1 + sourceType: + title: "datascope" + const: "datascope" + enum: + - "datascope" + order: 0 + type: "string" + source-datascope-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "Authorization" + airbyte_secret: true + description: "API Key" + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date for the data to be replicated" + examples: + - "dd/mm/YYYY HH:MM" + pattern: "^[0-9]{2}/[0-9]{2}/[0-9]{4} [0-9]{2}:[0-9]{2}$" + order: 1 + source-metabase: + type: "object" + required: + - "instance_api_url" + - "username" + - "sourceType" + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "https://localhost:3000/api/" + pattern: "^https://" + order: 0 + username: + type: "string" + title: "Username" + order: 1 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + session_token: + type: "string" + description: + "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." 
+ airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + sourceType: + title: "metabase" + const: "metabase" + enum: + - "metabase" + order: 0 + type: "string" + source-metabase-update: + type: "object" + required: + - "instance_api_url" + - "username" + properties: + instance_api_url: + type: "string" + title: "Metabase Instance API URL" + description: "URL to your metabase instance API" + examples: + - "https://localhost:3000/api/" + pattern: "^https://" + order: 0 + username: + type: "string" + title: "Username" + order: 1 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 2 + session_token: + type: "string" + description: + "To generate your session token, you need to run the following\ + \ command: ``` curl -X POST \\\n -H \"Content-Type: application/json\"\ + \ \\\n -d '{\"username\": \"person@metabase.com\", \"password\": \"fakepassword\"\ + }' \\\n http://localhost:3000/api/session\n``` Then copy the value of\ + \ the `id` field returned by a successful call to that API.\nNote that\ + \ by default, sessions are good for 14 days and needs to be regenerated." + airbyte_secret: true + order: 3 + source-bing-ads: + title: "Bing Ads Spec" + type: "object" + required: + - "developer_token" + - "client_id" + - "refresh_token" + - "sourceType" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + tenant_id: + type: "string" + title: "Tenant ID" + description: + "The Tenant ID of your Microsoft Advertising developer application.\ + \ Set this to \"common\" unless you know you need a different value." + airbyte_secret: true + default: "common" + order: 0 + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." 
+ airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + developer_token: + type: "string" + title: "Developer Token" + description: + "Developer token associated with user. See more info in the docs." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + account_names: + title: "Account Names Predicates" + description: "Predicates that will be used to sync data by specific accounts." + type: "array" + order: 5 + items: + description: "Account Names Predicates Config." + type: "object" + properties: + operator: + title: "Operator" + description: + "An Operator that will be used to filter accounts. The\ + \ Contains predicate has features for matching words, matching inflectional\ + \ forms of words, searching using wildcard characters, and searching\ + \ using proximity. The Equals is used to return all rows where account\ + \ name is equal(=) to the string that you provided" + type: "string" + enum: + - "Contains" + - "Equals" + name: + title: "Account Name" + description: + "Account Name is a string value for comparing with the\ + \ specified predicate." + type: "string" + required: + - "operator" + - "name" + reports_start_date: + type: "string" + title: "Reports replication start date" + format: "date" + description: + "The start date from which to begin replicating report data.\ + \ Any data generated before this date will not be replicated in reports.\ + \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\ + \ and current calendar year will be replicated." 
+ order: 6 + lookback_window: + title: "Lookback window" + description: + "Also known as attribution or conversion window. How far into\ + \ the past to look for records (in days). If your conversion window has\ + \ an hours/minutes granularity, round it up to the number of days exceeding.\ + \ Used only for performance report streams in incremental mode without\ + \ specified Reports Start Date." + type: "integer" + default: 0 + minimum: 0 + maximum: 90 + order: 7 + custom_reports: + title: "Custom Reports" + description: "You can add your Custom Bing Ads report by creating one." + order: 8 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Report Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + examples: + - "Account Performance" + - "AdDynamicTextPerformanceReport" + - "custom report" + reporting_object: + title: "Reporting Data Object" + description: + "The name of the the object derives from the ReportRequest\ + \ object. You can find it in Bing Ads Api docs - Reporting API -\ + \ Reporting Data Objects." 
+ type: "string" + enum: + - "AccountPerformanceReportRequest" + - "AdDynamicTextPerformanceReportRequest" + - "AdExtensionByAdReportRequest" + - "AdExtensionByKeywordReportRequest" + - "AdExtensionDetailReportRequest" + - "AdGroupPerformanceReportRequest" + - "AdPerformanceReportRequest" + - "AgeGenderAudienceReportRequest" + - "AudiencePerformanceReportRequest" + - "CallDetailReportRequest" + - "CampaignPerformanceReportRequest" + - "ConversionPerformanceReportRequest" + - "DestinationUrlPerformanceReportRequest" + - "DSAAutoTargetPerformanceReportRequest" + - "DSACategoryPerformanceReportRequest" + - "DSASearchQueryPerformanceReportRequest" + - "GeographicPerformanceReportRequest" + - "GoalsAndFunnelsReportRequest" + - "HotelDimensionPerformanceReportRequest" + - "HotelGroupPerformanceReportRequest" + - "KeywordPerformanceReportRequest" + - "NegativeKeywordConflictReportRequest" + - "ProductDimensionPerformanceReportRequest" + - "ProductMatchCountReportRequest" + - "ProductNegativeKeywordConflictReportRequest" + - "ProductPartitionPerformanceReportRequest" + - "ProductPartitionUnitPerformanceReportRequest" + - "ProductSearchQueryPerformanceReportRequest" + - "ProfessionalDemographicsAudienceReportRequest" + - "PublisherUsagePerformanceReportRequest" + - "SearchCampaignChangeHistoryReportRequest" + - "SearchQueryPerformanceReportRequest" + - "ShareOfVoiceReportRequest" + - "UserLocationPerformanceReportRequest" + report_columns: + title: "Columns" + description: + "A list of available report object columns. You can find\ + \ it in description of reporting object that you want to add to\ + \ custom report." + type: "array" + items: + description: "Name of report column." + type: "string" + minItems: 1 + report_aggregation: + title: "Aggregation" + description: "A list of available aggregations." + type: "string" + items: + title: "ValidEnums" + description: "An enumeration of aggregations." 
+ enum: + - "Hourly" + - "Daily" + - "Weekly" + - "Monthly" + - "DayOfWeek" + - "HourOfDay" + - "WeeklyStartingMonday" + - "Summary" + default: + - "Hourly" + required: + - "name" + - "reporting_object" + - "report_columns" + - "report_aggregation" + sourceType: + title: "bing-ads" + const: "bing-ads" + enum: + - "bing-ads" + order: 0 + type: "string" + source-bing-ads-update: + title: "Bing Ads Spec" + type: "object" + required: + - "developer_token" + - "client_id" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + tenant_id: + type: "string" + title: "Tenant ID" + description: + "The Tenant ID of your Microsoft Advertising developer application.\ + \ Set this to \"common\" unless you know you need a different value." + airbyte_secret: true + default: "common" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + refresh_token: + type: "string" + title: "Refresh Token" + description: "Refresh Token to renew the expired Access Token." + airbyte_secret: true + order: 3 + developer_token: + type: "string" + title: "Developer Token" + description: + "Developer token associated with user. See more info in the docs." + airbyte_secret: true + order: 4 + account_names: + title: "Account Names Predicates" + description: "Predicates that will be used to sync data by specific accounts." + type: "array" + order: 5 + items: + description: "Account Names Predicates Config." + type: "object" + properties: + operator: + title: "Operator" + description: + "An Operator that will be used to filter accounts. 
The\ + \ Contains predicate has features for matching words, matching inflectional\ + \ forms of words, searching using wildcard characters, and searching\ + \ using proximity. The Equals is used to return all rows where account\ + \ name is equal(=) to the string that you provided" + type: "string" + enum: + - "Contains" + - "Equals" + name: + title: "Account Name" + description: + "Account Name is a string value for comparing with the\ + \ specified predicate." + type: "string" + required: + - "operator" + - "name" + reports_start_date: + type: "string" + title: "Reports replication start date" + format: "date" + description: + "The start date from which to begin replicating report data.\ + \ Any data generated before this date will not be replicated in reports.\ + \ This is a UTC date in YYYY-MM-DD format. If not set, data from previous\ + \ and current calendar year will be replicated." + order: 6 + lookback_window: + title: "Lookback window" + description: + "Also known as attribution or conversion window. How far into\ + \ the past to look for records (in days). If your conversion window has\ + \ an hours/minutes granularity, round it up to the number of days exceeding.\ + \ Used only for performance report streams in incremental mode without\ + \ specified Reports Start Date." + type: "integer" + default: 0 + minimum: 0 + maximum: 90 + order: 7 + custom_reports: + title: "Custom Reports" + description: "You can add your Custom Bing Ads report by creating one." + order: 8 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Report Name" + description: + "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + examples: + - "Account Performance" + - "AdDynamicTextPerformanceReport" + - "custom report" + reporting_object: + title: "Reporting Data Object" + description: + "The name of the the object derives from the ReportRequest\ + \ object. 
You can find it in Bing Ads Api docs - Reporting API -\ + \ Reporting Data Objects." + type: "string" + enum: + - "AccountPerformanceReportRequest" + - "AdDynamicTextPerformanceReportRequest" + - "AdExtensionByAdReportRequest" + - "AdExtensionByKeywordReportRequest" + - "AdExtensionDetailReportRequest" + - "AdGroupPerformanceReportRequest" + - "AdPerformanceReportRequest" + - "AgeGenderAudienceReportRequest" + - "AudiencePerformanceReportRequest" + - "CallDetailReportRequest" + - "CampaignPerformanceReportRequest" + - "ConversionPerformanceReportRequest" + - "DestinationUrlPerformanceReportRequest" + - "DSAAutoTargetPerformanceReportRequest" + - "DSACategoryPerformanceReportRequest" + - "DSASearchQueryPerformanceReportRequest" + - "GeographicPerformanceReportRequest" + - "GoalsAndFunnelsReportRequest" + - "HotelDimensionPerformanceReportRequest" + - "HotelGroupPerformanceReportRequest" + - "KeywordPerformanceReportRequest" + - "NegativeKeywordConflictReportRequest" + - "ProductDimensionPerformanceReportRequest" + - "ProductMatchCountReportRequest" + - "ProductNegativeKeywordConflictReportRequest" + - "ProductPartitionPerformanceReportRequest" + - "ProductPartitionUnitPerformanceReportRequest" + - "ProductSearchQueryPerformanceReportRequest" + - "ProfessionalDemographicsAudienceReportRequest" + - "PublisherUsagePerformanceReportRequest" + - "SearchCampaignChangeHistoryReportRequest" + - "SearchQueryPerformanceReportRequest" + - "ShareOfVoiceReportRequest" + - "UserLocationPerformanceReportRequest" + report_columns: + title: "Columns" + description: + "A list of available report object columns. You can find\ + \ it in description of reporting object that you want to add to\ + \ custom report." + type: "array" + items: + description: "Name of report column." + type: "string" + minItems: 1 + report_aggregation: + title: "Aggregation" + description: "A list of available aggregations." 
+ type: "string" + items: + title: "ValidEnums" + description: "An enumeration of aggregations." + enum: + - "Hourly" + - "Daily" + - "Weekly" + - "Monthly" + - "DayOfWeek" + - "HourOfDay" + - "WeeklyStartingMonday" + - "Summary" + default: + - "Hourly" + required: + - "name" + - "reporting_object" + - "report_columns" + - "report_aggregation" + source-monday: + title: "Monday Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + subdomain: + type: "string" + title: "Subdomain/Slug" + description: + "Slug/subdomain of the account, or the first part of\ + \ the URL that comes before .monday.com" + default: "" + order: 0 + auth_type: + type: "string" + const: "oauth2.0" + order: 1 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: "API Token for making authenticated requests." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "monday" + const: "monday" + enum: + - "monday" + order: 0 + type: "string" + source-monday-update: + title: "Monday Spec" + type: "object" + required: [] + properties: + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "access_token" + properties: + subdomain: + type: "string" + title: "Subdomain/Slug" + description: + "Slug/subdomain of the account, or the first part of\ + \ the URL that comes before .monday.com" + default: "" + order: 0 + auth_type: + type: "string" + const: "oauth2.0" + order: 1 + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_type" + - "api_token" + properties: + auth_type: + type: "string" + const: "api_token" + order: 0 + enum: + - "api_token" + api_token: + type: "string" + title: "Personal API Token" + description: "API Token for making authenticated requests." 
+ airbyte_secret: true + source-algolia: + type: "object" + required: + - "api_key" + - "application_id" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + application_id: + type: "string" + description: "The application ID for your application found in settings" + order: 1 + title: "Application ID" + search_query: + type: "string" + description: + "Search query to be used with indexes_query stream with format\ + \ defined in `https://www.algolia.com/doc/rest-api/search/#tag/Search/operation/searchSingleIndex`" + order: 2 + title: "Indexes Search query" + default: "hitsPerPage=2&getRankingInfo=1" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + object_id: + type: "string" + description: "Object ID within index for search queries" + order: 4 + title: "Object ID" + default: "ecommerce-sample-data-9999996" + sourceType: + title: "algolia" + const: "algolia" + enum: + - "algolia" + order: 0 + type: "string" + source-algolia-update: + type: "object" + required: + - "api_key" + - "application_id" + - "start_date" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + application_id: + type: "string" + description: "The application ID for your application found in settings" + order: 1 + title: "Application ID" + search_query: + type: "string" + description: + "Search query to be used with indexes_query stream with format\ + \ defined in `https://www.algolia.com/doc/rest-api/search/#tag/Search/operation/searchSingleIndex`" + order: 2 + title: "Indexes Search query" + default: "hitsPerPage=2&getRankingInfo=1" + start_date: + type: "string" + order: 3 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + object_id: + 
type: "string" + description: "Object ID within index for search queries" + order: 4 + title: "Object ID" + default: "ecommerce-sample-data-9999996" + source-amplitude: + title: "Amplitude Spec" + type: "object" + required: + - "api_key" + - "secret_key" + - "start_date" + - "sourceType" + properties: + data_region: + type: "string" + title: "Data region" + description: "Amplitude data region server" + enum: + - "Standard Server" + - "EU Residency Server" + default: "Standard Server" + api_key: + type: "string" + title: "API Key" + description: + "Amplitude API Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + secret_key: + type: "string" + title: "Secret Key" + description: + "Amplitude Secret Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + format: "date-time" + title: "Replication Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2021-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-01-25T00:00:00Z" + request_time_range: + type: "integer" + title: "Request time range" + description: + "According to Considerations too big time range in request can cause a timeout\ + \ error. In this case, set shorter time interval in hours." 
+ default: 24 + minimum: 1 + maximum: 8760 + sourceType: + title: "amplitude" + const: "amplitude" + enum: + - "amplitude" + order: 0 + type: "string" + source-amplitude-update: + title: "Amplitude Spec" + type: "object" + required: + - "api_key" + - "secret_key" + - "start_date" + properties: + data_region: + type: "string" + title: "Data region" + description: "Amplitude data region server" + enum: + - "Standard Server" + - "EU Residency Server" + default: "Standard Server" + api_key: + type: "string" + title: "API Key" + description: + "Amplitude API Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + secret_key: + type: "string" + title: "Secret Key" + description: + "Amplitude Secret Key. See the setup guide for more information on how to obtain this key." + airbyte_secret: true + start_date: + type: "string" + format: "date-time" + title: "Replication Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2021-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-01-25T00:00:00Z" + request_time_range: + type: "integer" + title: "Request time range" + description: + "According to Considerations too big time range in request can cause a timeout\ + \ error. In this case, set shorter time interval in hours." + default: 24 + minimum: 1 + maximum: 8760 + source-google-pagespeed-insights: + type: "object" + required: + - "categories" + - "strategies" + - "urls" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited\ + \ when using without API Key. Creating and using the API key therefore\ + \ is recommended. The key is case sensitive." 
+ airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + categories: + type: "array" + items: + type: "string" + enum: + - "accessibility" + - "best-practices" + - "performance" + - "pwa" + - "seo" + title: "Lighthouse Categories" + description: + "Defines which Lighthouse category to run. One or many of:\ + \ \"accessibility\", \"best-practices\", \"performance\", \"pwa\", \"\ + seo\"." + order: 1 + strategies: + type: "array" + items: + type: "string" + enum: + - "desktop" + - "mobile" + title: "Analyses Strategies" + description: + "The analyses strategy to use. Either \"desktop\" or \"mobile\"\ + ." + order: 2 + urls: + type: "array" + items: + type: "string" + pattern: + "^(?:origin:)?(http(s)?:\\/\\/)[\\w.-]+(?:\\.[\\w\\.-]+)+[\\\ + w\\-\\._~:\\/?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$" + title: "URLs to analyse" + description: + "The URLs to retrieve pagespeed information from. The connector\ + \ will attempt to sync PageSpeed reports for all the defined URLs. Format:\ + \ https://(www.)url.domain" + example: "https://example.com" + order: 3 + sourceType: + title: "google-pagespeed-insights" + const: "google-pagespeed-insights" + enum: + - "google-pagespeed-insights" + order: 0 + type: "string" + source-google-pagespeed-insights-update: + type: "object" + required: + - "categories" + - "strategies" + - "urls" + properties: + api_key: + type: "string" + title: "API Key" + description: + "Google PageSpeed API Key. See here. The key is optional - however the API is heavily rate limited\ + \ when using without API Key. Creating and using the API key therefore\ + \ is recommended. The key is case sensitive." + airbyte_secret: true + order: 0 + categories: + type: "array" + items: + type: "string" + enum: + - "accessibility" + - "best-practices" + - "performance" + - "pwa" + - "seo" + title: "Lighthouse Categories" + description: + "Defines which Lighthouse category to run. 
One or many of:\ + \ \"accessibility\", \"best-practices\", \"performance\", \"pwa\", \"\ + seo\"." + order: 1 + strategies: + type: "array" + items: + type: "string" + enum: + - "desktop" + - "mobile" + title: "Analyses Strategies" + description: + "The analyses strategy to use. Either \"desktop\" or \"mobile\"\ + ." + order: 2 + urls: + type: "array" + items: + type: "string" + pattern: + "^(?:origin:)?(http(s)?:\\/\\/)[\\w.-]+(?:\\.[\\w\\.-]+)+[\\\ + w\\-\\._~:\\/?#\\[\\]@!\\$&'\\(\\)\\*\\+,;=.]+$" + title: "URLs to analyse" + description: + "The URLs to retrieve pagespeed information from. The connector\ + \ will attempt to sync PageSpeed reports for all the defined URLs. Format:\ + \ https://(www.)url.domain" + example: "https://example.com" + order: 3 + source-savvycal: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Go to SavvyCal → Settings → Developer → Personal Tokens and\ + \ make a new token. Then, copy the private key. https://savvycal.com/developers" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "savvycal" + const: "savvycal" + enum: + - "savvycal" + order: 0 + type: "string" + source-savvycal-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Go to SavvyCal → Settings → Developer → Personal Tokens and\ + \ make a new token. Then, copy the private key. 
https://savvycal.com/developers" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + source-shortcut: + type: "object" + required: + - "api_key_2" + - "start_date" + - "sourceType" + properties: + api_key_2: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + query: + type: "string" + description: "Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`" + title: "Query" + default: "title:Our first Epic" + order: 2 + sourceType: + title: "shortcut" + const: "shortcut" + enum: + - "shortcut" + order: 0 + type: "string" + source-shortcut-update: + type: "object" + required: + - "api_key_2" + - "start_date" + properties: + api_key_2: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + query: + type: "string" + description: "Query for searching as defined in `https://help.shortcut.com/hc/en-us/articles/360000046646-Searching-in-Shortcut-Using-Search-Operators`" + title: "Query" + default: "title:Our first Epic" + order: 2 + source-pipedrive: + title: "Pipedrive Spec" + type: "object" + required: + - "replication_start_date" + - "api_token" + - "sourceType" + properties: + api_token: + title: "API Token" + type: "string" + description: "The Pipedrive API Token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. 
When specified and not\ + \ None, then stream will behave as incremental" + examples: + - "2017-01-25 00:00:00Z" + type: "string" + sourceType: + title: "pipedrive" + const: "pipedrive" + enum: + - "pipedrive" + order: 0 + type: "string" + source-pipedrive-update: + title: "Pipedrive Spec" + type: "object" + required: + - "replication_start_date" + - "api_token" + properties: + api_token: + title: "API Token" + type: "string" + description: "The Pipedrive API Token." + airbyte_secret: true + replication_start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. When specified and not\ + \ None, then stream will behave as incremental" + examples: + - "2017-01-25 00:00:00Z" + type: "string" + source-amazon-ads: + title: "Amazon Ads Spec" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + x-speakeasy-param-sensitive: true + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." 
+ enum: + - "NA" + - "EU" + - "FE" + type: "string" + default: "NA" + order: 4 + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-10-10" + - "2022-10-22" + order: 5 + type: "string" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + items: + type: "string" + enum: + - "enabled" + - "paused" + - "archived" + type: "array" + uniqueItems: true + order: 8 + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + examples: + - 3 + - 10 + type: "integer" + default: 3 + order: 9 + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. 
Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + items: + type: "string" + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "array" + uniqueItems: true + order: 10 + sourceType: + title: "amazon-ads" + const: "amazon-ads" + enum: + - "amazon-ads" + order: 0 + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + source-amazon-ads-update: + title: "Amazon Ads Spec" + type: "object" + properties: + auth_type: + title: "Auth Type" + const: "oauth2.0" + order: 0 + type: "string" + enum: + - "oauth2.0" + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." + order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + refresh_token: + title: "Refresh Token" + description: + "Amazon Ads refresh token. See the docs for more information on how to obtain this token." + airbyte_secret: true + order: 3 + type: "string" + region: + title: "Region" + description: + "Region to pull data from (EU/NA/FE). See docs for more details." + enum: + - "NA" + - "EU" + - "FE" + type: "string" + default: "NA" + order: 4 + start_date: + title: "Start Date" + description: + "The Start date for collecting reports, should not be more\ + \ than 60 days in the past. In YYYY-MM-DD format" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + format: "date" + examples: + - "2022-10-10" + - "2022-10-22" + order: 5 + type: "string" + profiles: + title: "Profile IDs" + description: + "Profile IDs you want to fetch data for. 
The Amazon Ads source\ + \ connector supports only profiles with seller and vendor type, profiles\ + \ with agency type will be ignored. See docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." + order: 6 + type: "array" + items: + type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: + "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" + state_filter: + title: "State Filter" + description: + "Reflects the state of the Display, Product, and Brand Campaign\ + \ streams as enabled, paused, or archived. If you do not populate this\ + \ field, it will be ignored completely." + items: + type: "string" + enum: + - "enabled" + - "paused" + - "archived" + type: "array" + uniqueItems: true + order: 8 + look_back_window: + title: "Look Back Window" + description: + "The amount of days to go back in time to get the updated data\ + \ from Amazon Ads" + examples: + - 3 + - 10 + type: "integer" + default: 3 + order: 9 + report_record_types: + title: "Report Record Types" + description: + "Optional configuration which accepts an array of string of\ + \ record types. Leave blank for default behaviour to pull all report types.\ + \ Use this config option only if you want to pull specific report type(s).\ + \ See docs for more details" + items: + type: "string" + enum: + - "adGroups" + - "asins" + - "asins_keywords" + - "asins_targets" + - "campaigns" + - "keywords" + - "productAds" + - "targets" + type: "array" + uniqueItems: true + order: 10 + required: + - "client_id" + - "client_secret" + - "refresh_token" + source-sendinblue: + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Your API Key. See here." 
+ title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "sendinblue" + const: "sendinblue" + enum: + - "sendinblue" + order: 0 + type: "string" + source-sendinblue-update: + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Your API Key. See here." + title: "API Key" + airbyte_secret: true + order: 0 + source-github: + title: "GitHub Source Spec" + type: "object" + required: + - "credentials" + - "repositories" + - "sourceType" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to GitHub" + type: "object" + order: 0 + group: "auth" + oneOf: + - type: "object" + title: "OAuth" + required: + - "access_token" + properties: + option_title: + type: "string" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + access_token: + type: "string" + title: "Access Token" + description: "OAuth access token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + const: "PAT Credentials" + order: 0 + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Tokens" + description: + "Log into GitHub and then generate a personal access token. 
To load balance your API quota consumption\ + \ across multiple API tokens, input multiple tokens separated with\ + \ \",\"" + airbyte_secret: true + x-speakeasy-param-sensitive: true + repositories: + type: "array" + items: + type: "string" + pattern: "^[\\w.-]+/(([\\w.-]*\\*)|[\\w.-]+(?docs for more info" + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + order: 2 + format: "date-time" + api_url: + type: "string" + examples: + - "https://github.com" + - "https://github.company.org" + title: "API URL" + default: "https://api.github.com/" + description: + "Please enter your basic URL from self-hosted GitHub instance\ + \ or leave it empty to use GitHub." + order: 3 + branches: + type: "array" + items: + type: "string" + title: "Branches" + examples: + - "airbytehq/airbyte/master" + - "airbytehq/airbyte/my-branch" + description: + "List of GitHub repository branches to pull commits for, e.g.\ + \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\ + \ the default branch will be pulled." + order: 4 + pattern_descriptor: "org/repo/branch1 org/repo/branch2" + max_waiting_time: + type: "integer" + title: "Max Waiting Time (in minutes)" + examples: + - 10 + - 30 + - 60 + default: 10 + minimum: 1 + maximum: 60 + description: + "Max Waiting Time for rate limit. 
Set higher value to wait\ + \ till rate limits will be resetted to continue sync" + order: 5 + sourceType: + title: "github" + const: "github" + enum: + - "github" + order: 0 + type: "string" + source-github-update: + title: "GitHub Source Spec" + type: "object" + required: + - "credentials" + - "repositories" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to GitHub" + type: "object" + order: 0 + group: "auth" + oneOf: + - type: "object" + title: "OAuth" + required: + - "access_token" + properties: + option_title: + type: "string" + const: "OAuth Credentials" + order: 0 + enum: + - "OAuth Credentials" + access_token: + type: "string" + title: "Access Token" + description: "OAuth access token" + airbyte_secret: true + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + - type: "object" + title: "Personal Access Token" + required: + - "personal_access_token" + properties: + option_title: + type: "string" + const: "PAT Credentials" + order: 0 + enum: + - "PAT Credentials" + personal_access_token: + type: "string" + title: "Personal Access Tokens" + description: + "Log into GitHub and then generate a personal access token. 
To load balance your API quota consumption\ + \ across multiple API tokens, input multiple tokens separated with\ + \ \",\"" + airbyte_secret: true + repositories: + type: "array" + items: + type: "string" + pattern: "^[\\w.-]+/(([\\w.-]*\\*)|[\\w.-]+(?docs for more info" + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" + order: 2 + format: "date-time" + api_url: + type: "string" + examples: + - "https://github.com" + - "https://github.company.org" + title: "API URL" + default: "https://api.github.com/" + description: + "Please enter your basic URL from self-hosted GitHub instance\ + \ or leave it empty to use GitHub." + order: 3 + branches: + type: "array" + items: + type: "string" + title: "Branches" + examples: + - "airbytehq/airbyte/master" + - "airbytehq/airbyte/my-branch" + description: + "List of GitHub repository branches to pull commits for, e.g.\ + \ `airbytehq/airbyte/master`. If no branches are specified for a repository,\ + \ the default branch will be pulled." + order: 4 + pattern_descriptor: "org/repo/branch1 org/repo/branch2" + max_waiting_time: + type: "integer" + title: "Max Waiting Time (in minutes)" + examples: + - 10 + - 30 + - 60 + default: 10 + minimum: 1 + maximum: 60 + description: + "Max Waiting Time for rate limit. 
Set higher value to wait\ + \ till rate limits will be resetted to continue sync" + order: 5 + source-guru: + type: "object" + required: + - "username" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + team_id: + type: "string" + description: + "Team ID received through response of /teams streams, make\ + \ sure about access to the team" + order: 3 + title: "team_id" + search_cards_query: + type: "string" + description: "Query for searching cards" + order: 4 + title: "search_cards_query" + sourceType: + title: "guru" + const: "guru" + enum: + - "guru" + order: 0 + type: "string" + source-guru-update: + type: "object" + required: + - "username" + - "start_date" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + start_date: + type: "string" + order: 2 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + team_id: + type: "string" + description: + "Team ID received through response of /teams streams, make\ + \ sure about access to the team" + order: 3 + title: "team_id" + search_cards_query: + type: "string" + description: "Query for searching cards" + order: 4 + title: "search_cards_query" + source-bigquery: + title: "BigQuery Source Spec" + type: "object" + required: + - "project_id" + - "credentials_json" + - "sourceType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." 
+ title: "Project ID" + dataset_id: + type: "string" + description: + "The dataset ID to search for tables and views. If you are\ + \ only loading data from one dataset, setting this option could result\ + \ in much faster schema discovery." + title: "Default Dataset ID" + credentials_json: + type: "string" + description: + "The contents of your Service Account Key JSON file. See the\ + \ docs for more information on how to obtain this key." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "bigquery" + const: "bigquery" + enum: + - "bigquery" + order: 0 + type: "string" + source-bigquery-update: + title: "BigQuery Source Spec" + type: "object" + required: + - "project_id" + - "credentials_json" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + dataset_id: + type: "string" + description: + "The dataset ID to search for tables and views. If you are\ + \ only loading data from one dataset, setting this option could result\ + \ in much faster schema discovery." + title: "Default Dataset ID" + credentials_json: + type: "string" + description: + "The contents of your Service Account Key JSON file. See the\ + \ docs for more information on how to obtain this key." + title: "Credentials JSON" + airbyte_secret: true + source-vantage: + type: "object" + required: + - "access_token" + - "sourceType" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API Access token. See here." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "vantage" + const: "vantage" + enum: + - "vantage" + order: 0 + type: "string" + source-vantage-update: + type: "object" + required: + - "access_token" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API Access token. See here." 
+ airbyte_secret: true + order: 0 + source-calendly: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Go to Integrations → API & Webhooks to obtain your bearer\ + \ token. https://calendly.com/integrations/api_webhooks" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "calendly" + const: "calendly" + enum: + - "calendly" + order: 0 + type: "string" + source-calendly-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + description: + "Go to Integrations → API & Webhooks to obtain your bearer\ + \ token. https://calendly.com/integrations/api_webhooks" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-picqer: + type: "object" + required: + - "username" + - "organization_name" + - "start_date" + - "sourceType" + properties: + username: + type: "string" + title: "Username" + order: 0 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + organization_name: + type: "string" + description: "The organization name which is used to login to picqer" + title: "Organization Name" + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + sourceType: + title: "picqer" + const: "picqer" + enum: + - "picqer" + order: 0 + type: "string" + source-picqer-update: + type: "object" + required: + - "username" + - "organization_name" + - "start_date" + properties: + 
username: + type: "string" + title: "Username" + order: 0 + password: + type: "string" + title: "Password" + always_show: true + airbyte_secret: true + order: 1 + organization_name: + type: "string" + description: "The organization name which is used to login to picqer" + title: "Organization Name" + order: 2 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 3 + source-firebolt: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + x-speakeasy-param-sensitive: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + sourceType: + title: "firebolt" + const: "firebolt" + enum: + - "firebolt" + order: 0 + type: "string" + source-firebolt-update: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." 
+ airbyte_secret: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + source-clazar: + type: "object" + required: + - "client_id" + - "client_secret" + - "sourceType" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "clazar" + const: "clazar" + enum: + - "clazar" + order: 0 + type: "string" + source-clazar-update: + type: "object" + required: + - "client_id" + - "client_secret" + properties: + client_id: + type: "string" + order: 0 + title: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + order: 1 + title: "Client secret" + airbyte_secret: true + source-outreach: + title: "Source Outreach Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "redirect_uri" + - "start_date" + - "sourceType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Outreach developer application." + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Outreach developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining the new access token." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + redirect_uri: + type: "string" + title: "Redirect URI" + description: + "A Redirect URI is the location where the authorization server\ + \ sends the user once the app has been successfully authorized and granted\ + \ an authorization code or access token." + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Outreach\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." + examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "outreach" + const: "outreach" + enum: + - "outreach" + order: 0 + type: "string" + source-outreach-update: + title: "Source Outreach Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "redirect_uri" + - "start_date" + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Outreach developer application." + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Outreach developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining the new access token." + airbyte_secret: true + redirect_uri: + type: "string" + title: "Redirect URI" + description: + "A Redirect URI is the location where the authorization server\ + \ sends the user once the app has been successfully authorized and granted\ + \ an authorization code or access token." + start_date: + type: "string" + title: "Start Date" + description: + "The date from which you'd like to replicate data for Outreach\ + \ API, in the format YYYY-MM-DDT00:00:00Z. All data generated after this\ + \ date will be replicated." 
+ examples: + - "2020-11-16T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-pokeapi: + type: "object" + required: + - "pokemon_name" + - "sourceType" + properties: + pokemon_name: + type: "string" + description: "Pokemon requested from the API." + title: "Pokemon Name" + pattern: "^[a-z0-9_\\-]+$" + enum: + - "bulbasaur" + - "ivysaur" + - "venusaur" + - "charmander" + - "charmeleon" + - "charizard" + - "squirtle" + - "wartortle" + - "blastoise" + - "caterpie" + - "metapod" + - "butterfree" + - "weedle" + - "kakuna" + - "beedrill" + - "pidgey" + - "pidgeotto" + - "pidgeot" + - "rattata" + - "raticate" + - "spearow" + - "fearow" + - "ekans" + - "arbok" + - "pikachu" + - "raichu" + - "sandshrew" + - "sandslash" + - "nidoranf" + - "nidorina" + - "nidoqueen" + - "nidoranm" + - "nidorino" + - "nidoking" + - "clefairy" + - "clefable" + - "vulpix" + - "ninetales" + - "jigglypuff" + - "wigglytuff" + - "zubat" + - "golbat" + - "oddish" + - "gloom" + - "vileplume" + - "paras" + - "parasect" + - "venonat" + - "venomoth" + - "diglett" + - "dugtrio" + - "meowth" + - "persian" + - "psyduck" + - "golduck" + - "mankey" + - "primeape" + - "growlithe" + - "arcanine" + - "poliwag" + - "poliwhirl" + - "poliwrath" + - "abra" + - "kadabra" + - "alakazam" + - "machop" + - "machoke" + - "machamp" + - "bellsprout" + - "weepinbell" + - "victreebel" + - "tentacool" + - "tentacruel" + - "geodude" + - "graveler" + - "golem" + - "ponyta" + - "rapidash" + - "slowpoke" + - "slowbro" + - "magnemite" + - "magneton" + - "farfetchd" + - "doduo" + - "dodrio" + - "seel" + - "dewgong" + - "grimer" + - "muk" + - "shellder" + - "cloyster" + - "gastly" + - "haunter" + - "gengar" + - "onix" + - "drowzee" + - "hypno" + - "krabby" + - "kingler" + - "voltorb" + - "electrode" + - "exeggcute" + - "exeggutor" + - "cubone" + - "marowak" + - "hitmonlee" + - "hitmonchan" + - "lickitung" + - "koffing" + - "weezing" + - "rhyhorn" + - "rhydon" + - "chansey" + - "tangela" + - 
"kangaskhan" + - "horsea" + - "seadra" + - "goldeen" + - "seaking" + - "staryu" + - "starmie" + - "mrmime" + - "scyther" + - "jynx" + - "electabuzz" + - "magmar" + - "pinsir" + - "tauros" + - "magikarp" + - "gyarados" + - "lapras" + - "ditto" + - "eevee" + - "vaporeon" + - "jolteon" + - "flareon" + - "porygon" + - "omanyte" + - "omastar" + - "kabuto" + - "kabutops" + - "aerodactyl" + - "snorlax" + - "articuno" + - "zapdos" + - "moltres" + - "dratini" + - "dragonair" + - "dragonite" + - "mewtwo" + - "mew" + - "chikorita" + - "bayleef" + - "meganium" + - "cyndaquil" + - "quilava" + - "typhlosion" + - "totodile" + - "croconaw" + - "feraligatr" + - "sentret" + - "furret" + - "hoothoot" + - "noctowl" + - "ledyba" + - "ledian" + - "spinarak" + - "ariados" + - "crobat" + - "chinchou" + - "lanturn" + - "pichu" + - "cleffa" + - "igglybuff" + - "togepi" + - "togetic" + - "natu" + - "xatu" + - "mareep" + - "flaaffy" + - "ampharos" + - "bellossom" + - "marill" + - "azumarill" + - "sudowoodo" + - "politoed" + - "hoppip" + - "skiploom" + - "jumpluff" + - "aipom" + - "sunkern" + - "sunflora" + - "yanma" + - "wooper" + - "quagsire" + - "espeon" + - "umbreon" + - "murkrow" + - "slowking" + - "misdreavus" + - "unown" + - "wobbuffet" + - "girafarig" + - "pineco" + - "forretress" + - "dunsparce" + - "gligar" + - "steelix" + - "snubbull" + - "granbull" + - "qwilfish" + - "scizor" + - "shuckle" + - "heracross" + - "sneasel" + - "teddiursa" + - "ursaring" + - "slugma" + - "magcargo" + - "swinub" + - "piloswine" + - "corsola" + - "remoraid" + - "octillery" + - "delibird" + - "mantine" + - "skarmory" + - "houndour" + - "houndoom" + - "kingdra" + - "phanpy" + - "donphan" + - "porygon2" + - "stantler" + - "smeargle" + - "tyrogue" + - "hitmontop" + - "smoochum" + - "elekid" + - "magby" + - "miltank" + - "blissey" + - "raikou" + - "entei" + - "suicune" + - "larvitar" + - "pupitar" + - "tyranitar" + - "lugia" + - "ho-oh" + - "celebi" + - "treecko" + - "grovyle" + - "sceptile" + - "torchic" + - 
"combusken" + - "blaziken" + - "mudkip" + - "marshtomp" + - "swampert" + - "poochyena" + - "mightyena" + - "zigzagoon" + - "linoone" + - "wurmple" + - "silcoon" + - "beautifly" + - "cascoon" + - "dustox" + - "lotad" + - "lombre" + - "ludicolo" + - "seedot" + - "nuzleaf" + - "shiftry" + - "taillow" + - "swellow" + - "wingull" + - "pelipper" + - "ralts" + - "kirlia" + - "gardevoir" + - "surskit" + - "masquerain" + - "shroomish" + - "breloom" + - "slakoth" + - "vigoroth" + - "slaking" + - "nincada" + - "ninjask" + - "shedinja" + - "whismur" + - "loudred" + - "exploud" + - "makuhita" + - "hariyama" + - "azurill" + - "nosepass" + - "skitty" + - "delcatty" + - "sableye" + - "mawile" + - "aron" + - "lairon" + - "aggron" + - "meditite" + - "medicham" + - "electrike" + - "manectric" + - "plusle" + - "minun" + - "volbeat" + - "illumise" + - "roselia" + - "gulpin" + - "swalot" + - "carvanha" + - "sharpedo" + - "wailmer" + - "wailord" + - "numel" + - "camerupt" + - "torkoal" + - "spoink" + - "grumpig" + - "spinda" + - "trapinch" + - "vibrava" + - "flygon" + - "cacnea" + - "cacturne" + - "swablu" + - "altaria" + - "zangoose" + - "seviper" + - "lunatone" + - "solrock" + - "barboach" + - "whiscash" + - "corphish" + - "crawdaunt" + - "baltoy" + - "claydol" + - "lileep" + - "cradily" + - "anorith" + - "armaldo" + - "feebas" + - "milotic" + - "castform" + - "kecleon" + - "shuppet" + - "banette" + - "duskull" + - "dusclops" + - "tropius" + - "chimecho" + - "absol" + - "wynaut" + - "snorunt" + - "glalie" + - "spheal" + - "sealeo" + - "walrein" + - "clamperl" + - "huntail" + - "gorebyss" + - "relicanth" + - "luvdisc" + - "bagon" + - "shelgon" + - "salamence" + - "beldum" + - "metang" + - "metagross" + - "regirock" + - "regice" + - "registeel" + - "latias" + - "latios" + - "kyogre" + - "groudon" + - "rayquaza" + - "jirachi" + - "deoxys" + - "turtwig" + - "grotle" + - "torterra" + - "chimchar" + - "monferno" + - "infernape" + - "piplup" + - "prinplup" + - "empoleon" + - "starly" + - 
"staravia" + - "staraptor" + - "bidoof" + - "bibarel" + - "kricketot" + - "kricketune" + - "shinx" + - "luxio" + - "luxray" + - "budew" + - "roserade" + - "cranidos" + - "rampardos" + - "shieldon" + - "bastiodon" + - "burmy" + - "wormadam" + - "mothim" + - "combee" + - "vespiquen" + - "pachirisu" + - "buizel" + - "floatzel" + - "cherubi" + - "cherrim" + - "shellos" + - "gastrodon" + - "ambipom" + - "drifloon" + - "drifblim" + - "buneary" + - "lopunny" + - "mismagius" + - "honchkrow" + - "glameow" + - "purugly" + - "chingling" + - "stunky" + - "skuntank" + - "bronzor" + - "bronzong" + - "bonsly" + - "mimejr" + - "happiny" + - "chatot" + - "spiritomb" + - "gible" + - "gabite" + - "garchomp" + - "munchlax" + - "riolu" + - "lucario" + - "hippopotas" + - "hippowdon" + - "skorupi" + - "drapion" + - "croagunk" + - "toxicroak" + - "carnivine" + - "finneon" + - "lumineon" + - "mantyke" + - "snover" + - "abomasnow" + - "weavile" + - "magnezone" + - "lickilicky" + - "rhyperior" + - "tangrowth" + - "electivire" + - "magmortar" + - "togekiss" + - "yanmega" + - "leafeon" + - "glaceon" + - "gliscor" + - "mamoswine" + - "porygon-z" + - "gallade" + - "probopass" + - "dusknoir" + - "froslass" + - "rotom" + - "uxie" + - "mesprit" + - "azelf" + - "dialga" + - "palkia" + - "heatran" + - "regigigas" + - "giratina" + - "cresselia" + - "phione" + - "manaphy" + - "darkrai" + - "shaymin" + - "arceus" + - "victini" + - "snivy" + - "servine" + - "serperior" + - "tepig" + - "pignite" + - "emboar" + - "oshawott" + - "dewott" + - "samurott" + - "patrat" + - "watchog" + - "lillipup" + - "herdier" + - "stoutland" + - "purrloin" + - "liepard" + - "pansage" + - "simisage" + - "pansear" + - "simisear" + - "panpour" + - "simipour" + - "munna" + - "musharna" + - "pidove" + - "tranquill" + - "unfezant" + - "blitzle" + - "zebstrika" + - "roggenrola" + - "boldore" + - "gigalith" + - "woobat" + - "swoobat" + - "drilbur" + - "excadrill" + - "audino" + - "timburr" + - "gurdurr" + - "conkeldurr" + - "tympole" 
+ - "palpitoad" + - "seismitoad" + - "throh" + - "sawk" + - "sewaddle" + - "swadloon" + - "leavanny" + - "venipede" + - "whirlipede" + - "scolipede" + - "cottonee" + - "whimsicott" + - "petilil" + - "lilligant" + - "basculin" + - "sandile" + - "krokorok" + - "krookodile" + - "darumaka" + - "darmanitan" + - "maractus" + - "dwebble" + - "crustle" + - "scraggy" + - "scrafty" + - "sigilyph" + - "yamask" + - "cofagrigus" + - "tirtouga" + - "carracosta" + - "archen" + - "archeops" + - "trubbish" + - "garbodor" + - "zorua" + - "zoroark" + - "minccino" + - "cinccino" + - "gothita" + - "gothorita" + - "gothitelle" + - "solosis" + - "duosion" + - "reuniclus" + - "ducklett" + - "swanna" + - "vanillite" + - "vanillish" + - "vanilluxe" + - "deerling" + - "sawsbuck" + - "emolga" + - "karrablast" + - "escavalier" + - "foongus" + - "amoonguss" + - "frillish" + - "jellicent" + - "alomomola" + - "joltik" + - "galvantula" + - "ferroseed" + - "ferrothorn" + - "klink" + - "klang" + - "klinklang" + - "tynamo" + - "eelektrik" + - "eelektross" + - "elgyem" + - "beheeyem" + - "litwick" + - "lampent" + - "chandelure" + - "axew" + - "fraxure" + - "haxorus" + - "cubchoo" + - "beartic" + - "cryogonal" + - "shelmet" + - "accelgor" + - "stunfisk" + - "mienfoo" + - "mienshao" + - "druddigon" + - "golett" + - "golurk" + - "pawniard" + - "bisharp" + - "bouffalant" + - "rufflet" + - "braviary" + - "vullaby" + - "mandibuzz" + - "heatmor" + - "durant" + - "deino" + - "zweilous" + - "hydreigon" + - "larvesta" + - "volcarona" + - "cobalion" + - "terrakion" + - "virizion" + - "tornadus" + - "thundurus" + - "reshiram" + - "zekrom" + - "landorus" + - "kyurem" + - "keldeo" + - "meloetta" + - "genesect" + - "chespin" + - "quilladin" + - "chesnaught" + - "fennekin" + - "braixen" + - "delphox" + - "froakie" + - "frogadier" + - "greninja" + - "bunnelby" + - "diggersby" + - "fletchling" + - "fletchinder" + - "talonflame" + - "scatterbug" + - "spewpa" + - "vivillon" + - "litleo" + - "pyroar" + - "flabebe" + - 
"floette" + - "florges" + - "skiddo" + - "gogoat" + - "pancham" + - "pangoro" + - "furfrou" + - "espurr" + - "meowstic" + - "honedge" + - "doublade" + - "aegislash" + - "spritzee" + - "aromatisse" + - "swirlix" + - "slurpuff" + - "inkay" + - "malamar" + - "binacle" + - "barbaracle" + - "skrelp" + - "dragalge" + - "clauncher" + - "clawitzer" + - "helioptile" + - "heliolisk" + - "tyrunt" + - "tyrantrum" + - "amaura" + - "aurorus" + - "sylveon" + - "hawlucha" + - "dedenne" + - "carbink" + - "goomy" + - "sliggoo" + - "goodra" + - "klefki" + - "phantump" + - "trevenant" + - "pumpkaboo" + - "gourgeist" + - "bergmite" + - "avalugg" + - "noibat" + - "noivern" + - "xerneas" + - "yveltal" + - "zygarde" + - "diancie" + - "hoopa" + - "volcanion" + - "rowlet" + - "dartrix" + - "decidueye" + - "litten" + - "torracat" + - "incineroar" + - "popplio" + - "brionne" + - "primarina" + - "pikipek" + - "trumbeak" + - "toucannon" + - "yungoos" + - "gumshoos" + - "grubbin" + - "charjabug" + - "vikavolt" + - "crabrawler" + - "crabominable" + - "oricorio" + - "cutiefly" + - "ribombee" + - "rockruff" + - "lycanroc" + - "wishiwashi" + - "mareanie" + - "toxapex" + - "mudbray" + - "mudsdale" + - "dewpider" + - "araquanid" + - "fomantis" + - "lurantis" + - "morelull" + - "shiinotic" + - "salandit" + - "salazzle" + - "stufful" + - "bewear" + - "bounsweet" + - "steenee" + - "tsareena" + - "comfey" + - "oranguru" + - "passimian" + - "wimpod" + - "golisopod" + - "sandygast" + - "palossand" + - "pyukumuku" + - "typenull" + - "silvally" + - "minior" + - "komala" + - "turtonator" + - "togedemaru" + - "mimikyu" + - "bruxish" + - "drampa" + - "dhelmise" + - "jangmo-o" + - "hakamo-o" + - "kommo-o" + - "tapukoko" + - "tapulele" + - "tapubulu" + - "tapufini" + - "cosmog" + - "cosmoem" + - "solgaleo" + - "lunala" + - "nihilego" + - "buzzwole" + - "pheromosa" + - "xurkitree" + - "celesteela" + - "kartana" + - "guzzlord" + - "necrozma" + - "magearna" + - "marshadow" + - "poipole" + - "naganadel" + - 
"stakataka" + - "blacephalon" + - "zeraora" + - "meltan" + - "melmetal" + - "grookey" + - "thwackey" + - "rillaboom" + - "scorbunny" + - "raboot" + - "cinderace" + - "sobble" + - "drizzile" + - "inteleon" + - "skwovet" + - "greedent" + - "rookidee" + - "corvisquire" + - "corviknight" + - "blipbug" + - "dottler" + - "orbeetle" + - "nickit" + - "thievul" + - "gossifleur" + - "eldegoss" + - "wooloo" + - "dubwool" + - "chewtle" + - "drednaw" + - "yamper" + - "boltund" + - "rolycoly" + - "carkol" + - "coalossal" + - "applin" + - "flapple" + - "appletun" + - "silicobra" + - "sandaconda" + - "cramorant" + - "arrokuda" + - "barraskewda" + - "toxel" + - "toxtricity" + - "sizzlipede" + - "centiskorch" + - "clobbopus" + - "grapploct" + - "sinistea" + - "polteageist" + - "hatenna" + - "hattrem" + - "hatterene" + - "impidimp" + - "morgrem" + - "grimmsnarl" + - "obstagoon" + - "perrserker" + - "cursola" + - "sirfetchd" + - "mrrime" + - "runerigus" + - "milcery" + - "alcremie" + - "falinks" + - "pincurchin" + - "snom" + - "frosmoth" + - "stonjourner" + - "eiscue" + - "indeedee" + - "morpeko" + - "cufant" + - "copperajah" + - "dracozolt" + - "arctozolt" + - "dracovish" + - "arctovish" + - "duraludon" + - "dreepy" + - "drakloak" + - "dragapult" + - "zacian" + - "zamazenta" + - "eternatus" + - "kubfu" + - "urshifu" + - "zarude" + - "regieleki" + - "regidrago" + - "glastrier" + - "spectrier" + - "calyrex" + examples: + - "ditto" + - "luxray" + - "snorlax" + order: 0 + sourceType: + title: "pokeapi" + const: "pokeapi" + enum: + - "pokeapi" + order: 0 + type: "string" + source-pokeapi-update: + type: "object" + required: + - "pokemon_name" + properties: + pokemon_name: + type: "string" + description: "Pokemon requested from the API." 
+ title: "Pokemon Name" + pattern: "^[a-z0-9_\\-]+$" + enum: + - "bulbasaur" + - "ivysaur" + - "venusaur" + - "charmander" + - "charmeleon" + - "charizard" + - "squirtle" + - "wartortle" + - "blastoise" + - "caterpie" + - "metapod" + - "butterfree" + - "weedle" + - "kakuna" + - "beedrill" + - "pidgey" + - "pidgeotto" + - "pidgeot" + - "rattata" + - "raticate" + - "spearow" + - "fearow" + - "ekans" + - "arbok" + - "pikachu" + - "raichu" + - "sandshrew" + - "sandslash" + - "nidoranf" + - "nidorina" + - "nidoqueen" + - "nidoranm" + - "nidorino" + - "nidoking" + - "clefairy" + - "clefable" + - "vulpix" + - "ninetales" + - "jigglypuff" + - "wigglytuff" + - "zubat" + - "golbat" + - "oddish" + - "gloom" + - "vileplume" + - "paras" + - "parasect" + - "venonat" + - "venomoth" + - "diglett" + - "dugtrio" + - "meowth" + - "persian" + - "psyduck" + - "golduck" + - "mankey" + - "primeape" + - "growlithe" + - "arcanine" + - "poliwag" + - "poliwhirl" + - "poliwrath" + - "abra" + - "kadabra" + - "alakazam" + - "machop" + - "machoke" + - "machamp" + - "bellsprout" + - "weepinbell" + - "victreebel" + - "tentacool" + - "tentacruel" + - "geodude" + - "graveler" + - "golem" + - "ponyta" + - "rapidash" + - "slowpoke" + - "slowbro" + - "magnemite" + - "magneton" + - "farfetchd" + - "doduo" + - "dodrio" + - "seel" + - "dewgong" + - "grimer" + - "muk" + - "shellder" + - "cloyster" + - "gastly" + - "haunter" + - "gengar" + - "onix" + - "drowzee" + - "hypno" + - "krabby" + - "kingler" + - "voltorb" + - "electrode" + - "exeggcute" + - "exeggutor" + - "cubone" + - "marowak" + - "hitmonlee" + - "hitmonchan" + - "lickitung" + - "koffing" + - "weezing" + - "rhyhorn" + - "rhydon" + - "chansey" + - "tangela" + - "kangaskhan" + - "horsea" + - "seadra" + - "goldeen" + - "seaking" + - "staryu" + - "starmie" + - "mrmime" + - "scyther" + - "jynx" + - "electabuzz" + - "magmar" + - "pinsir" + - "tauros" + - "magikarp" + - "gyarados" + - "lapras" + - "ditto" + - "eevee" + - "vaporeon" + - "jolteon" + - 
"flareon" + - "porygon" + - "omanyte" + - "omastar" + - "kabuto" + - "kabutops" + - "aerodactyl" + - "snorlax" + - "articuno" + - "zapdos" + - "moltres" + - "dratini" + - "dragonair" + - "dragonite" + - "mewtwo" + - "mew" + - "chikorita" + - "bayleef" + - "meganium" + - "cyndaquil" + - "quilava" + - "typhlosion" + - "totodile" + - "croconaw" + - "feraligatr" + - "sentret" + - "furret" + - "hoothoot" + - "noctowl" + - "ledyba" + - "ledian" + - "spinarak" + - "ariados" + - "crobat" + - "chinchou" + - "lanturn" + - "pichu" + - "cleffa" + - "igglybuff" + - "togepi" + - "togetic" + - "natu" + - "xatu" + - "mareep" + - "flaaffy" + - "ampharos" + - "bellossom" + - "marill" + - "azumarill" + - "sudowoodo" + - "politoed" + - "hoppip" + - "skiploom" + - "jumpluff" + - "aipom" + - "sunkern" + - "sunflora" + - "yanma" + - "wooper" + - "quagsire" + - "espeon" + - "umbreon" + - "murkrow" + - "slowking" + - "misdreavus" + - "unown" + - "wobbuffet" + - "girafarig" + - "pineco" + - "forretress" + - "dunsparce" + - "gligar" + - "steelix" + - "snubbull" + - "granbull" + - "qwilfish" + - "scizor" + - "shuckle" + - "heracross" + - "sneasel" + - "teddiursa" + - "ursaring" + - "slugma" + - "magcargo" + - "swinub" + - "piloswine" + - "corsola" + - "remoraid" + - "octillery" + - "delibird" + - "mantine" + - "skarmory" + - "houndour" + - "houndoom" + - "kingdra" + - "phanpy" + - "donphan" + - "porygon2" + - "stantler" + - "smeargle" + - "tyrogue" + - "hitmontop" + - "smoochum" + - "elekid" + - "magby" + - "miltank" + - "blissey" + - "raikou" + - "entei" + - "suicune" + - "larvitar" + - "pupitar" + - "tyranitar" + - "lugia" + - "ho-oh" + - "celebi" + - "treecko" + - "grovyle" + - "sceptile" + - "torchic" + - "combusken" + - "blaziken" + - "mudkip" + - "marshtomp" + - "swampert" + - "poochyena" + - "mightyena" + - "zigzagoon" + - "linoone" + - "wurmple" + - "silcoon" + - "beautifly" + - "cascoon" + - "dustox" + - "lotad" + - "lombre" + - "ludicolo" + - "seedot" + - "nuzleaf" + - "shiftry" + - 
"taillow" + - "swellow" + - "wingull" + - "pelipper" + - "ralts" + - "kirlia" + - "gardevoir" + - "surskit" + - "masquerain" + - "shroomish" + - "breloom" + - "slakoth" + - "vigoroth" + - "slaking" + - "nincada" + - "ninjask" + - "shedinja" + - "whismur" + - "loudred" + - "exploud" + - "makuhita" + - "hariyama" + - "azurill" + - "nosepass" + - "skitty" + - "delcatty" + - "sableye" + - "mawile" + - "aron" + - "lairon" + - "aggron" + - "meditite" + - "medicham" + - "electrike" + - "manectric" + - "plusle" + - "minun" + - "volbeat" + - "illumise" + - "roselia" + - "gulpin" + - "swalot" + - "carvanha" + - "sharpedo" + - "wailmer" + - "wailord" + - "numel" + - "camerupt" + - "torkoal" + - "spoink" + - "grumpig" + - "spinda" + - "trapinch" + - "vibrava" + - "flygon" + - "cacnea" + - "cacturne" + - "swablu" + - "altaria" + - "zangoose" + - "seviper" + - "lunatone" + - "solrock" + - "barboach" + - "whiscash" + - "corphish" + - "crawdaunt" + - "baltoy" + - "claydol" + - "lileep" + - "cradily" + - "anorith" + - "armaldo" + - "feebas" + - "milotic" + - "castform" + - "kecleon" + - "shuppet" + - "banette" + - "duskull" + - "dusclops" + - "tropius" + - "chimecho" + - "absol" + - "wynaut" + - "snorunt" + - "glalie" + - "spheal" + - "sealeo" + - "walrein" + - "clamperl" + - "huntail" + - "gorebyss" + - "relicanth" + - "luvdisc" + - "bagon" + - "shelgon" + - "salamence" + - "beldum" + - "metang" + - "metagross" + - "regirock" + - "regice" + - "registeel" + - "latias" + - "latios" + - "kyogre" + - "groudon" + - "rayquaza" + - "jirachi" + - "deoxys" + - "turtwig" + - "grotle" + - "torterra" + - "chimchar" + - "monferno" + - "infernape" + - "piplup" + - "prinplup" + - "empoleon" + - "starly" + - "staravia" + - "staraptor" + - "bidoof" + - "bibarel" + - "kricketot" + - "kricketune" + - "shinx" + - "luxio" + - "luxray" + - "budew" + - "roserade" + - "cranidos" + - "rampardos" + - "shieldon" + - "bastiodon" + - "burmy" + - "wormadam" + - "mothim" + - "combee" + - "vespiquen" + - 
"pachirisu" + - "buizel" + - "floatzel" + - "cherubi" + - "cherrim" + - "shellos" + - "gastrodon" + - "ambipom" + - "drifloon" + - "drifblim" + - "buneary" + - "lopunny" + - "mismagius" + - "honchkrow" + - "glameow" + - "purugly" + - "chingling" + - "stunky" + - "skuntank" + - "bronzor" + - "bronzong" + - "bonsly" + - "mimejr" + - "happiny" + - "chatot" + - "spiritomb" + - "gible" + - "gabite" + - "garchomp" + - "munchlax" + - "riolu" + - "lucario" + - "hippopotas" + - "hippowdon" + - "skorupi" + - "drapion" + - "croagunk" + - "toxicroak" + - "carnivine" + - "finneon" + - "lumineon" + - "mantyke" + - "snover" + - "abomasnow" + - "weavile" + - "magnezone" + - "lickilicky" + - "rhyperior" + - "tangrowth" + - "electivire" + - "magmortar" + - "togekiss" + - "yanmega" + - "leafeon" + - "glaceon" + - "gliscor" + - "mamoswine" + - "porygon-z" + - "gallade" + - "probopass" + - "dusknoir" + - "froslass" + - "rotom" + - "uxie" + - "mesprit" + - "azelf" + - "dialga" + - "palkia" + - "heatran" + - "regigigas" + - "giratina" + - "cresselia" + - "phione" + - "manaphy" + - "darkrai" + - "shaymin" + - "arceus" + - "victini" + - "snivy" + - "servine" + - "serperior" + - "tepig" + - "pignite" + - "emboar" + - "oshawott" + - "dewott" + - "samurott" + - "patrat" + - "watchog" + - "lillipup" + - "herdier" + - "stoutland" + - "purrloin" + - "liepard" + - "pansage" + - "simisage" + - "pansear" + - "simisear" + - "panpour" + - "simipour" + - "munna" + - "musharna" + - "pidove" + - "tranquill" + - "unfezant" + - "blitzle" + - "zebstrika" + - "roggenrola" + - "boldore" + - "gigalith" + - "woobat" + - "swoobat" + - "drilbur" + - "excadrill" + - "audino" + - "timburr" + - "gurdurr" + - "conkeldurr" + - "tympole" + - "palpitoad" + - "seismitoad" + - "throh" + - "sawk" + - "sewaddle" + - "swadloon" + - "leavanny" + - "venipede" + - "whirlipede" + - "scolipede" + - "cottonee" + - "whimsicott" + - "petilil" + - "lilligant" + - "basculin" + - "sandile" + - "krokorok" + - "krookodile" + - 
"darumaka" + - "darmanitan" + - "maractus" + - "dwebble" + - "crustle" + - "scraggy" + - "scrafty" + - "sigilyph" + - "yamask" + - "cofagrigus" + - "tirtouga" + - "carracosta" + - "archen" + - "archeops" + - "trubbish" + - "garbodor" + - "zorua" + - "zoroark" + - "minccino" + - "cinccino" + - "gothita" + - "gothorita" + - "gothitelle" + - "solosis" + - "duosion" + - "reuniclus" + - "ducklett" + - "swanna" + - "vanillite" + - "vanillish" + - "vanilluxe" + - "deerling" + - "sawsbuck" + - "emolga" + - "karrablast" + - "escavalier" + - "foongus" + - "amoonguss" + - "frillish" + - "jellicent" + - "alomomola" + - "joltik" + - "galvantula" + - "ferroseed" + - "ferrothorn" + - "klink" + - "klang" + - "klinklang" + - "tynamo" + - "eelektrik" + - "eelektross" + - "elgyem" + - "beheeyem" + - "litwick" + - "lampent" + - "chandelure" + - "axew" + - "fraxure" + - "haxorus" + - "cubchoo" + - "beartic" + - "cryogonal" + - "shelmet" + - "accelgor" + - "stunfisk" + - "mienfoo" + - "mienshao" + - "druddigon" + - "golett" + - "golurk" + - "pawniard" + - "bisharp" + - "bouffalant" + - "rufflet" + - "braviary" + - "vullaby" + - "mandibuzz" + - "heatmor" + - "durant" + - "deino" + - "zweilous" + - "hydreigon" + - "larvesta" + - "volcarona" + - "cobalion" + - "terrakion" + - "virizion" + - "tornadus" + - "thundurus" + - "reshiram" + - "zekrom" + - "landorus" + - "kyurem" + - "keldeo" + - "meloetta" + - "genesect" + - "chespin" + - "quilladin" + - "chesnaught" + - "fennekin" + - "braixen" + - "delphox" + - "froakie" + - "frogadier" + - "greninja" + - "bunnelby" + - "diggersby" + - "fletchling" + - "fletchinder" + - "talonflame" + - "scatterbug" + - "spewpa" + - "vivillon" + - "litleo" + - "pyroar" + - "flabebe" + - "floette" + - "florges" + - "skiddo" + - "gogoat" + - "pancham" + - "pangoro" + - "furfrou" + - "espurr" + - "meowstic" + - "honedge" + - "doublade" + - "aegislash" + - "spritzee" + - "aromatisse" + - "swirlix" + - "slurpuff" + - "inkay" + - "malamar" + - "binacle" + - 
"barbaracle" + - "skrelp" + - "dragalge" + - "clauncher" + - "clawitzer" + - "helioptile" + - "heliolisk" + - "tyrunt" + - "tyrantrum" + - "amaura" + - "aurorus" + - "sylveon" + - "hawlucha" + - "dedenne" + - "carbink" + - "goomy" + - "sliggoo" + - "goodra" + - "klefki" + - "phantump" + - "trevenant" + - "pumpkaboo" + - "gourgeist" + - "bergmite" + - "avalugg" + - "noibat" + - "noivern" + - "xerneas" + - "yveltal" + - "zygarde" + - "diancie" + - "hoopa" + - "volcanion" + - "rowlet" + - "dartrix" + - "decidueye" + - "litten" + - "torracat" + - "incineroar" + - "popplio" + - "brionne" + - "primarina" + - "pikipek" + - "trumbeak" + - "toucannon" + - "yungoos" + - "gumshoos" + - "grubbin" + - "charjabug" + - "vikavolt" + - "crabrawler" + - "crabominable" + - "oricorio" + - "cutiefly" + - "ribombee" + - "rockruff" + - "lycanroc" + - "wishiwashi" + - "mareanie" + - "toxapex" + - "mudbray" + - "mudsdale" + - "dewpider" + - "araquanid" + - "fomantis" + - "lurantis" + - "morelull" + - "shiinotic" + - "salandit" + - "salazzle" + - "stufful" + - "bewear" + - "bounsweet" + - "steenee" + - "tsareena" + - "comfey" + - "oranguru" + - "passimian" + - "wimpod" + - "golisopod" + - "sandygast" + - "palossand" + - "pyukumuku" + - "typenull" + - "silvally" + - "minior" + - "komala" + - "turtonator" + - "togedemaru" + - "mimikyu" + - "bruxish" + - "drampa" + - "dhelmise" + - "jangmo-o" + - "hakamo-o" + - "kommo-o" + - "tapukoko" + - "tapulele" + - "tapubulu" + - "tapufini" + - "cosmog" + - "cosmoem" + - "solgaleo" + - "lunala" + - "nihilego" + - "buzzwole" + - "pheromosa" + - "xurkitree" + - "celesteela" + - "kartana" + - "guzzlord" + - "necrozma" + - "magearna" + - "marshadow" + - "poipole" + - "naganadel" + - "stakataka" + - "blacephalon" + - "zeraora" + - "meltan" + - "melmetal" + - "grookey" + - "thwackey" + - "rillaboom" + - "scorbunny" + - "raboot" + - "cinderace" + - "sobble" + - "drizzile" + - "inteleon" + - "skwovet" + - "greedent" + - "rookidee" + - "corvisquire" + - 
"corviknight" + - "blipbug" + - "dottler" + - "orbeetle" + - "nickit" + - "thievul" + - "gossifleur" + - "eldegoss" + - "wooloo" + - "dubwool" + - "chewtle" + - "drednaw" + - "yamper" + - "boltund" + - "rolycoly" + - "carkol" + - "coalossal" + - "applin" + - "flapple" + - "appletun" + - "silicobra" + - "sandaconda" + - "cramorant" + - "arrokuda" + - "barraskewda" + - "toxel" + - "toxtricity" + - "sizzlipede" + - "centiskorch" + - "clobbopus" + - "grapploct" + - "sinistea" + - "polteageist" + - "hatenna" + - "hattrem" + - "hatterene" + - "impidimp" + - "morgrem" + - "grimmsnarl" + - "obstagoon" + - "perrserker" + - "cursola" + - "sirfetchd" + - "mrrime" + - "runerigus" + - "milcery" + - "alcremie" + - "falinks" + - "pincurchin" + - "snom" + - "frosmoth" + - "stonjourner" + - "eiscue" + - "indeedee" + - "morpeko" + - "cufant" + - "copperajah" + - "dracozolt" + - "arctozolt" + - "dracovish" + - "arctovish" + - "duraludon" + - "dreepy" + - "drakloak" + - "dragapult" + - "zacian" + - "zamazenta" + - "eternatus" + - "kubfu" + - "urshifu" + - "zarude" + - "regieleki" + - "regidrago" + - "glastrier" + - "spectrier" + - "calyrex" + examples: + - "ditto" + - "luxray" + - "snorlax" + order: 0 + source-senseforce: + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + - "sourceType" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API access token. See here. The toke is case sensitive." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: + "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. 
+ (Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their URL)" + order: 1 + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: + "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + order: 2 + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: + "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + format: "date" + order: 4 + sourceType: + title: "senseforce" + const: "senseforce" + enum: + - "senseforce" + order: 0 + type: "string" + source-senseforce-update: + type: "object" + required: + - "access_token" + - "backend_url" + - "dataset_id" + - "start_date" + properties: + access_token: + type: "string" + title: "API Access Token" + description: + "Your API access token. See here. The token is case sensitive." + airbyte_secret: true + order: 0 + backend_url: + type: "string" + title: "Senseforce backend URL" + examples: + - "https://galaxyapi.senseforce.io" + description: + "Your Senseforce API backend URL. This is the URL shown during\ + \ the Login screen. See here for more details. 
+ (Note: Most Senseforce backend APIs have the\ + \ term 'galaxy' in their URL)" + order: 1 + dataset_id: + type: "string" + title: "Dataset ID" + examples: + - "8f418098-ca28-4df5-9498-0df9fe78eda7" + description: + "The ID of the dataset you want to synchronize. The ID can\ + \ be found in the URL when opening the dataset. See here for more details. (Note: As the Senseforce API only allows to\ + \ synchronize a specific dataset, each dataset you want to synchronize\ + \ needs to be implemented as a separate airbyte source)." + order: 2 + start_date: + type: "string" + title: "The first day (in UTC) when to read data from." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + description: + "UTC date and time in the format 2017-01-25. Only data with\ + \ \"Timestamp\" after this date will be replicated. Important note: This\ + \ start date must be set to the first day of where your dataset provides\ + \ data. If your dataset has data from 2020-10-10 10:21:10, set the start_date\ + \ to 2020-10-10 or later" + examples: + - "2017-01-25" + format: "date" + order: 4 + source-freshsales: + type: "object" + required: + - "domain_name" + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshsales API Key. See here. The key is case sensitive." + airbyte_secret: true + x-speakeasy-param-sensitive: true + domain_name: + type: "string" + order: 0 + title: "Domain Name" + examples: + - "mydomain.myfreshworks.com" + description: "The Name of your Freshsales domain" + sourceType: + title: "freshsales" + const: "freshsales" + enum: + - "freshsales" + order: 0 + type: "string" + source-freshsales-update: + type: "object" + required: + - "domain_name" + - "api_key" + properties: + api_key: + type: "string" + order: 1 + title: "API Key" + description: + "Freshsales API Key. See here. The key is case sensitive." 
+ airbyte_secret: true + domain_name: + type: "string" + order: 0 + title: "Domain Name" + examples: + - "mydomain.myfreshworks.com" + description: "The Name of your Freshsales domain" + source-hubplanner: + title: "Hubplanner Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + type: "string" + description: + "Hubplanner API key. See https://github.com/hubplanner/API#authentication\ + \ for more details." + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "hubplanner" + const: "hubplanner" + enum: + - "hubplanner" + order: 0 + type: "string" + source-hubplanner-update: + title: "Hubplanner Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + type: "string" + description: + "Hubplanner API key. See https://github.com/hubplanner/API#authentication\ + \ for more details." + airbyte_secret: true + source-square: + title: "Square Spec" + type: "object" + required: + - "is_sandbox" + - "sourceType" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to Square." 
+ type: "object" + order: 0 + oneOf: + - title: "Oauth authentication" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "API key" + type: "object" + required: + - "auth_type" + - "api_key" + properties: + auth_type: + type: "string" + const: "API Key" + order: 1 + enum: + - "API Key" + api_key: + type: "string" + title: "API key token" + description: "The API key for a Square application" + airbyte_secret: true + x-speakeasy-param-sensitive: true + is_sandbox: + type: "boolean" + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + default: false + order: 1 + start_date: + type: "string" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. If not set, all data will be replicated." 
+ title: "Start Date" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + format: "date" + include_deleted_objects: + type: "boolean" + description: + "In some streams there is an option to include deleted objects\ + \ (Items, Categories, Discounts, Taxes)" + title: "Include Deleted Objects" + default: false + order: 3 + sourceType: + title: "square" + const: "square" + enum: + - "square" + order: 0 + type: "string" + source-square-update: + title: "Square Spec" + type: "object" + required: + - "is_sandbox" + properties: + credentials: + title: "Authentication" + description: "Choose how to authenticate to Square." + type: "object" + order: 0 + oneOf: + - title: "Oauth authentication" + type: "object" + required: + - "auth_type" + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_type: + type: "string" + const: "OAuth" + order: 0 + enum: + - "OAuth" + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: + "A refresh token generated using the above client ID\ + \ and secret" + airbyte_secret: true + - title: "API key" + type: "object" + required: + - "auth_type" + - "api_key" + properties: + auth_type: + type: "string" + const: "API Key" + order: 1 + enum: + - "API Key" + api_key: + type: "string" + title: "API key token" + description: "The API key for a Square application" + airbyte_secret: true + is_sandbox: + type: "boolean" + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + default: false + order: 1 + start_date: + type: "string" + description: + "UTC date in the format YYYY-MM-DD. Any data before this date\ + \ will not be replicated. 
If not set, all data will be replicated." + title: "Start Date" + default: "2021-01-01" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + order: 2 + format: "date" + include_deleted_objects: + type: "boolean" + description: + "In some streams there is an option to include deleted objects\ + \ (Items, Categories, Discounts, Taxes)" + title: "Include Deleted Objects" + default: false + order: 3 + source-paystack: + type: "object" + required: + - "start_date" + - "secret_key" + - "sourceType" + properties: + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2017-01-25T00:00:00Z" + order: 0 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set, the connector will always reload data from the past\ + \ N days, where N is the value set here. This is useful if your data is\ + \ updated after creation." + order: 1 + secret_key: + type: "string" + title: "Secret Key" + pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$" + description: + "The Paystack API key (usually starts with 'sk_live_'; find\ + \ yours here)." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "paystack" + const: "paystack" + enum: + - "paystack" + order: 0 + type: "string" + source-paystack-update: + type: "object" + required: + - "start_date" + - "secret_key" + properties: + start_date: + type: "string" + title: "Start Date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." 
+ examples: + - "2017-01-25T00:00:00Z" + order: 0 + lookback_window_days: + type: "integer" + title: "Lookback Window (in days)" + default: 0 + minimum: 0 + description: + "When set, the connector will always reload data from the past\ + \ N days, where N is the value set here. This is useful if your data is\ + \ updated after creation." + order: 1 + secret_key: + type: "string" + title: "Secret Key" + pattern: "^(s|r)k_(live|test)_[a-zA-Z0-9]+$" + description: + "The Paystack API key (usually starts with 'sk_live_'; find\ + \ yours here)." + airbyte_secret: true + order: 2 + source-redshift: + title: "Redshift Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "sourceType" + properties: + host: + title: "Host" + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)." + type: "string" + order: 1 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + order: 2 + database: + title: "Database" + description: "Name of the database." + type: "string" + examples: + - "master" + order: 3 + schemas: + title: "Schemas" + description: + "The list of schemas to sync from. Specify one or more explicitly\ + \ or keep empty to process all schemas. Schema names are case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + examples: + - "public" + order: 4 + username: + title: "Username" + description: "Username to use to access the database." + type: "string" + order: 5 + password: + title: "Password" + description: "Password associated with the username." 
+ type: "string" + airbyte_secret: true + order: 6 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 7 + sourceType: + title: "redshift" + const: "redshift" + enum: + - "redshift" + order: 0 + type: "string" + source-redshift-update: + title: "Redshift Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + properties: + host: + title: "Host" + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)." + type: "string" + order: 1 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + order: 2 + database: + title: "Database" + description: "Name of the database." + type: "string" + examples: + - "master" + order: 3 + schemas: + title: "Schemas" + description: + "The list of schemas to sync from. Specify one or more explicitly\ + \ or keep empty to process all schemas. Schema names are case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + examples: + - "public" + order: 4 + username: + title: "Username" + description: "Username to use to access the database." + type: "string" + order: 5 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 7 + source-productive: + type: "object" + required: + - "api_key" + - "organization_id" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + organization_id: + type: "string" + description: + "The organization ID which could be seen from `https://app.productive.io/xxxx-xxxx/settings/api-integrations`\ + \ page" + order: 1 + title: "Organization ID" + sourceType: + title: "productive" + const: "productive" + enum: + - "productive" + order: 0 + type: "string" + source-productive-update: + type: "object" + required: + - "api_key" + - "organization_id" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + organization_id: + type: "string" + description: + "The organization ID which could be seen from `https://app.productive.io/xxxx-xxxx/settings/api-integrations`\ + \ page" + order: 1 + title: "Organization ID" + source-survicate: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + sourceType: + title: "survicate" + const: "survicate" + enum: + - "survicate" + order: 0 + type: "string" + source-survicate-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + source-braintree: + title: "Braintree Spec" + type: "object" + properties: + merchant_id: + title: "Merchant ID" + description: + "The unique identifier for your entire gateway 
account. See\ + \ the docs for more information on how to obtain this ID." + name: "Merchant ID" + type: "string" + public_key: + title: "Public Key" + description: + "Braintree Public Key. See the docs for more information on how to obtain this key." + name: "Public Key" + type: "string" + private_key: + title: "Private Key" + description: + "Braintree Private Key. See the docs for more information on how to obtain this key." + name: "Private Key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + name: "Start Date" + examples: + - "2020" + - "2020-12-30" + - "2020-11-22 20:20:05" + type: "string" + format: "date-time" + environment: + title: "Environment" + description: "Environment specifies where the data will come from." + name: "Environment" + examples: + - "sandbox" + - "production" + - "qa" + - "development" + enum: + - "Development" + - "Sandbox" + - "Qa" + - "Production" + type: "string" + sourceType: + title: "braintree" + const: "braintree" + enum: + - "braintree" + order: 0 + type: "string" + required: + - "merchant_id" + - "public_key" + - "private_key" + - "environment" + - "sourceType" + source-braintree-update: + title: "Braintree Spec" + type: "object" + properties: + merchant_id: + title: "Merchant ID" + description: + "The unique identifier for your entire gateway account. See\ + \ the docs for more information on how to obtain this ID." + name: "Merchant ID" + type: "string" + public_key: + title: "Public Key" + description: + "Braintree Public Key. See the docs for more information on how to obtain this key." + name: "Public Key" + type: "string" + private_key: + title: "Private Key" + description: + "Braintree Private Key. See the docs for more information on how to obtain this key." 
+ name: "Private Key" + airbyte_secret: true + type: "string" + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + name: "Start Date" + examples: + - "2020" + - "2020-12-30" + - "2020-11-22 20:20:05" + type: "string" + format: "date-time" + environment: + title: "Environment" + description: "Environment specifies where the data will come from." + name: "Environment" + examples: + - "sandbox" + - "production" + - "qa" + - "development" + enum: + - "Development" + - "Sandbox" + - "Qa" + - "Production" + type: "string" + required: + - "merchant_id" + - "public_key" + - "private_key" + - "environment" + source-mailchimp: + title: "Mailchimp Spec" + type: "object" + required: + - "sourceType" + properties: + credentials: + type: "object" + title: "Authentication" + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "auth_type" + - "access_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + title: "Access Token" + type: "string" + description: + "An access token generated using the above client ID\ + \ and secret." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "API Key" + required: + - "auth_type" + - "apikey" + properties: + auth_type: + type: "string" + const: "apikey" + order: 1 + enum: + - "apikey" + apikey: + type: "string" + title: "API Key" + description: + "Mailchimp API Key. See the docs for information on how to generate this key." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + title: "Incremental Sync Start Date" + description: + "The date from which you want to start syncing data for Incremental\ + \ streams. Only records that have been created or modified since this\ + \ date will be synced. If left blank, all data will by synced." + type: "string" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-01-01T00:00:00.000Z" + sourceType: + title: "mailchimp" + const: "mailchimp" + enum: + - "mailchimp" + order: 0 + type: "string" + source-mailchimp-update: + title: "Mailchimp Spec" + type: "object" + required: [] + properties: + credentials: + type: "object" + title: "Authentication" + oneOf: + - title: "OAuth2.0" + type: "object" + required: + - "auth_type" + - "access_token" + properties: + auth_type: + type: "string" + const: "oauth2.0" + order: 0 + enum: + - "oauth2.0" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + title: "Access Token" + type: "string" + description: + "An access token generated using the above client ID\ + \ and secret." + airbyte_secret: true + - type: "object" + title: "API Key" + required: + - "auth_type" + - "apikey" + properties: + auth_type: + type: "string" + const: "apikey" + order: 1 + enum: + - "apikey" + apikey: + type: "string" + title: "API Key" + description: + "Mailchimp API Key. See the docs for information on how to generate this key." + airbyte_secret: true + start_date: + title: "Incremental Sync Start Date" + description: + "The date from which you want to start syncing data for Incremental\ + \ streams. 
Only records that have been created or modified since this\ + \ date will be synced. If left blank, all data will by synced." + type: "string" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:MM:SS.000Z" + examples: + - "2020-01-01T00:00:00.000Z" + source-airtable: + title: "Airtable Source Spec" + type: "object" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + x-speakeasy-param-sensitive: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Personal Access Token" + type: "object" + required: + - "api_key" + properties: + auth_method: + type: "string" + const: "api_key" + enum: + - "api_key" + api_key: + type: "string" + description: + "The Personal Access Token for the Airtable account.\ + \ See the Support Guide for more information on how to obtain this token." 
+ title: "Personal Access Token" + airbyte_secret: true + examples: + - "key1234567890" + x-speakeasy-param-sensitive: true + sourceType: + title: "airtable" + const: "airtable" + enum: + - "airtable" + order: 0 + type: "string" + source-airtable-update: + title: "Airtable Source Spec" + type: "object" + properties: + credentials: + title: "Authentication" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." + airbyte_secret: true + access_token: + type: "string" + description: "Access Token for making authenticated requests." + airbyte_secret: true + token_expiry_date: + type: "string" + description: "The date-time when the access token should be refreshed." + format: "date-time" + refresh_token: + type: "string" + title: "Refresh token" + description: "The key to refresh the expired access token." + airbyte_secret: true + - title: "Personal Access Token" + type: "object" + required: + - "api_key" + properties: + auth_method: + type: "string" + const: "api_key" + enum: + - "api_key" + api_key: + type: "string" + description: + "The Personal Access Token for the Airtable account.\ + \ See the Support Guide for more information on how to obtain this token." + title: "Personal Access Token" + airbyte_secret: true + examples: + - "key1234567890" + source-mssql: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "sourceType" + properties: + host: + description: "The hostname of the database." 
+ title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." 
+ required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 0 + certificate: + title: "Certificate" + type: "string" + description: + "certificate of the server, or of the CA that signed\ + \ the server certificate" + order: 1 + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 3600 + order: 3 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. 
If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 4 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 5 + min: 1000 + max: 10000 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 6 + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." + required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + source-mssql-update: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + properties: + host: + description: "The hostname of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 0 + certificate: + title: "Certificate" + type: "string" + description: + "certificate of the server, or of the CA that signed\ + \ the server certificate" + order: 1 + airbyte_secret: true + multiline: true + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: + "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." 
+ required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: + "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 3600 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 3600 + order: 3 + invalid_cdc_cursor_position_behavior: + type: "string" + title: "Invalid CDC position behavior (Advanced)" + description: + "Determines whether Airbyte should fail or re-sync data\ + \ in case of an stale/invalid cursor value into the WAL. If 'Fail\ + \ sync' is chosen, a user will have to manually reset the connection\ + \ before being able to continue syncing data. If 'Re-sync data'\ + \ is chosen, Airbyte will automatically trigger a refresh but could\ + \ lead to higher cloud costs and data loss." + enum: + - "Fail sync" + - "Re-sync data" + default: "Fail sync" + order: 4 + queue_size: + type: "integer" + title: "Size of the queue (Advanced)" + description: + "The size of the internal queue. This may interfere with\ + \ memory consumption and efficiency of the connector, please be\ + \ careful." + default: 10000 + order: 5 + min: 1000 + max: 10000 + initial_load_timeout_hours: + type: "integer" + title: "Initial Load Timeout in Hours (Advanced)" + description: + "The amount of time an initial load is allowed to continue\ + \ for before catching up on CDC logs." + default: 8 + min: 4 + max: 24 + order: 6 + - title: "Scan Changes with User Defined Cursor" + description: + "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." 
+ required: + - "method" + properties: + method: + type: "string" + const: "STANDARD" + order: 0 + enum: + - "STANDARD" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-dynamodb: + title: "Dynamodb Source Spec" + type: "object" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Access Keys" + type: + - "null" + - "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "User" + order: 0 + enum: + - "User" + access_key_id: + order: 1 + title: "Dynamodb Key Id" + type: "string" + description: + "The access key id to access Dynamodb. 
Airbyte requires\ + \ read permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + x-speakeasy-param-sensitive: true + secret_access_key: + order: 2 + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + x-speakeasy-param-sensitive: true + - type: "object" + title: "Role Based Authentication" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Role" + order: 0 + enum: + - "Role" + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + reserved_attribute_names: + title: "Reserved attribute names" + type: "string" + description: "Comma separated reserved attribute names present in your tables" + airbyte_secret: true + examples: + - "name, field_name, field-name" + x-speakeasy-param-sensitive: true + ignore_missing_read_permissions_tables: + title: "Ignore missing read permissions tables" + type: "boolean" + description: "Ignore tables with missing scan/read permissions" + default: false + sourceType: + title: "dynamodb" + 
const: "dynamodb" + enum: + - "dynamodb" + order: 0 + type: "string" + source-dynamodb-update: + title: "Dynamodb Source Spec" + type: "object" + properties: + credentials: + order: 0 + type: "object" + title: "Credentials" + description: "Credentials for the service" + oneOf: + - title: "Authenticate via Access Keys" + type: + - "null" + - "object" + required: + - "access_key_id" + - "secret_access_key" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "User" + order: 0 + enum: + - "User" + access_key_id: + order: 1 + title: "Dynamodb Key Id" + type: "string" + description: + "The access key id to access Dynamodb. Airbyte requires\ + \ read permissions to the database" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + order: 2 + title: "Dynamodb Access Key" + type: "string" + description: "The corresponding secret to the access key id." + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + - type: "object" + title: "Role Based Authentication" + additionalProperties: true + properties: + auth_type: + type: "string" + const: "Role" + order: 0 + enum: + - "Role" + endpoint: + title: "Dynamodb Endpoint" + type: "string" + default: "" + description: "the URL of the Dynamodb database" + examples: + - "https://{aws_dynamo_db_url}.com" + region: + title: "Dynamodb Region" + type: "string" + default: "" + description: "The region of the Dynamodb database" + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - 
"us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + reserved_attribute_names: + title: "Reserved attribute names" + type: "string" + description: "Comma separated reserved attribute names present in your tables" + airbyte_secret: true + examples: + - "name, field_name, field-name" + ignore_missing_read_permissions_tables: + title: "Ignore missing read permissions tables" + type: "boolean" + description: "Ignore tables with missing scan/read permissions" + default: false + source-kissmetrics: + type: "object" + required: + - "username" + - "sourceType" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "kissmetrics" + const: "kissmetrics" + enum: + - "kissmetrics" + order: 0 + type: "string" + source-kissmetrics-update: + type: "object" + required: + - "username" + properties: + username: + type: "string" + order: 0 + title: "Username" + password: + type: "string" + order: 1 + title: "Password" + always_show: true + airbyte_secret: true + source-salesforce: + title: "Salesforce Source Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "sourceType" + properties: + is_sandbox: + title: "Sandbox" + description: + "Toggle if you're using a Salesforce Sandbox" + type: "boolean" + default: false + order: 1 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: + "Enter your application's Salesforce Refresh 
Token used for Airbyte to access your Salesforce\ + \ account." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + start_date: + title: "Start Date" + description: + "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\ + \ format. Airbyte will replicate the data updated on and after this date.\ + \ If this field is blank, Airbyte will replicate the data for last two\ + \ years." + type: "string" + pattern: "^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?)$" + pattern_descriptor: "YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ" + examples: + - "2021-07-25" + - "2021-07-25T00:00:00Z" + format: "date-time" + order: 5 + force_use_bulk_api: + title: "Force to use BULK API" + type: "boolean" + description: + "Toggle to use Bulk API (this might cause empty fields for\ + \ some streams)" + default: false + order: 6 + stream_slice_step: + title: "Stream Slice Step for Incremental sync" + type: "string" + description: "The size of the time window (ISO8601 duration) to slice requests." + default: "P30D" + order: 7 + examples: + - "PT12H" + - "P7D" + - "P30D" + - "P1M" + - "P1Y" + streams_criteria: + type: "array" + order: 8 + items: + type: "object" + required: + - "criteria" + - "value" + properties: + criteria: + type: "string" + title: "Search criteria" + enum: + - "starts with" + - "ends with" + - "contains" + - "exacts" + - "starts not with" + - "ends not with" + - "not contains" + - "not exacts" + order: 1 + default: "contains" + value: + type: "string" + title: "Search value" + order: 2 + title: "Filter Salesforce Objects" + description: + "Add filters to select only required stream based on `SObject`\ + \ name. 
Use this field to filter which tables are displayed by this connector.\ + \ This is useful if your Salesforce account has a large number of tables\ + \ (>1000), in which case you may find it easier to navigate the UI and\ + \ speed up the connector's performance if you restrict the tables displayed\ + \ by this connector." + sourceType: + title: "salesforce" + const: "salesforce" + enum: + - "salesforce" + order: 0 + type: "string" + source-salesforce-update: + title: "Salesforce Source Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + is_sandbox: + title: "Sandbox" + description: + "Toggle if you're using a Salesforce Sandbox" + type: "boolean" + default: false + order: 1 + auth_type: + type: "string" + const: "Client" + enum: + - "Client" + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + refresh_token: + title: "Refresh Token" + description: + "Enter your application's Salesforce Refresh Token used for Airbyte to access your Salesforce\ + \ account." + type: "string" + airbyte_secret: true + order: 4 + start_date: + title: "Start Date" + description: + "Enter the date (or date-time) in the YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ\ + \ format. Airbyte will replicate the data updated on and after this date.\ + \ If this field is blank, Airbyte will replicate the data for last two\ + \ years." 
+ type: "string" + pattern: "^([0-9]{4}-[0-9]{2}-[0-9]{2}(T[0-9]{2}:[0-9]{2}:[0-9]{2}Z)?)$" + pattern_descriptor: "YYYY-MM-DD or YYYY-MM-DDTHH:mm:ssZ" + examples: + - "2021-07-25" + - "2021-07-25T00:00:00Z" + format: "date-time" + order: 5 + force_use_bulk_api: + title: "Force to use BULK API" + type: "boolean" + description: + "Toggle to use Bulk API (this might cause empty fields for\ + \ some streams)" + default: false + order: 6 + stream_slice_step: + title: "Stream Slice Step for Incremental sync" + type: "string" + description: "The size of the time window (ISO8601 duration) to slice requests." + default: "P30D" + order: 7 + examples: + - "PT12H" + - "P7D" + - "P30D" + - "P1M" + - "P1Y" + streams_criteria: + type: "array" + order: 8 + items: + type: "object" + required: + - "criteria" + - "value" + properties: + criteria: + type: "string" + title: "Search criteria" + enum: + - "starts with" + - "ends with" + - "contains" + - "exacts" + - "starts not with" + - "ends not with" + - "not contains" + - "not exacts" + order: 1 + default: "contains" + value: + type: "string" + title: "Search value" + order: 2 + title: "Filter Salesforce Objects" + description: + "Add filters to select only required stream based on `SObject`\ + \ name. Use this field to filter which tables are displayed by this connector.\ + \ This is useful if your Salesforce account has a large number of tables\ + \ (>1000), in which case you may find it easier to navigate the UI and\ + \ speed up the connector's performance if you restrict the tables displayed\ + \ by this connector." + source-clickhouse: + title: "ClickHouse Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "sourceType" + properties: + host: + description: "The host endpoint of the Clickhouse cluster." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." 
+ title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "default" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with this username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 6 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "clickhouse" + const: "clickhouse" + enum: + - "clickhouse" + order: 0 + type: "string" + source-clickhouse-update: + title: "ClickHouse Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + description: "The host endpoint of the Clickhouse cluster." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "default" + order: 2 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 3 + password: + description: "The password associated with this username." + title: "Password" + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more\ + \ information read about JDBC URL parameters." + title: "JDBC URL Parameters (Advanced)" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." 
+ type: "boolean" + default: true + order: 6 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + source-postmarkapp: + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + - "sourceType" + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + x-speakeasy-param-sensitive: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + x-speakeasy-param-sensitive: true + sourceType: + title: "postmarkapp" + const: "postmarkapp" + enum: + - "postmarkapp" + order: 0 + type: 
"string" + source-postmarkapp-update: + title: "Postmarkapp Spec" + type: "object" + required: + - "X-Postmark-Server-Token" + - "X-Postmark-Account-Token" + properties: + X-Postmark-Server-Token: + title: "X-Postmark-Server-Token" + type: "string" + description: "API Key for server" + airbyte_secret: true + X-Postmark-Account-Token: + title: "X-Postmark-Account-Token" + type: "string" + description: "API Key for account" + airbyte_secret: true + source-bitly: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + sourceType: + title: "bitly" + const: "bitly" + enum: + - "bitly" + order: 0 + type: "string" + source-bitly-update: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + end_date: + type: "string" + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 2 + source-hardcoded-records: + title: "Hardcoded Records Source Spec" + type: "object" + required: + - "sourceType" + properties: + count: + title: "Count" + description: "How many records per stream should be generated" + type: "integer" + minimum: 1 + default: 1000 + order: 0 + sourceType: + title: "hardcoded-records" + const: "hardcoded-records" + enum: + - "hardcoded-records" + order: 0 + type: 
"string" + source-hardcoded-records-update: + title: "Hardcoded Records Source Spec" + type: "object" + required: [] + properties: + count: + title: "Count" + description: "How many records per stream should be generated" + type: "integer" + minimum: 1 + default: 1000 + order: 0 + source-faker: + title: "Faker Source Spec" + type: "object" + required: + - "sourceType" + properties: + count: + title: "Count" + description: + "How many users should be generated in total. The purchases\ + \ table will be scaled to match, with 10 purchases created per 10 users.\ + \ This setting does not apply to the products stream." + type: "integer" + minimum: 1 + default: 1000 + order: 0 + seed: + title: "Seed" + description: + "Manually control the faker random seed to return the same\ + \ values on subsequent runs (leave -1 for random)" + type: "integer" + default: -1 + order: 1 + records_per_slice: + title: "Records Per Stream Slice" + description: + "How many fake records will be in each page (stream slice),\ + \ before a state message is emitted?" + type: "integer" + minimum: 1 + default: 1000 + order: 2 + always_updated: + title: "Always Updated" + description: + "Should the updated_at values for every record be new each\ + \ sync? Setting this to false will case the source to stop emitting records\ + \ after COUNT records have been emitted." + type: "boolean" + default: true + parallelism: + title: "Parallelism" + description: + "How many parallel workers should we use to generate fake data?\ + \ Choose a value equal to the number of CPUs you will allocate to this\ + \ source." + type: "integer" + minimum: 1 + default: 4 + order: 4 + sourceType: + title: "faker" + const: "faker" + enum: + - "faker" + order: 0 + type: "string" + source-faker-update: + title: "Faker Source Spec" + type: "object" + required: [] + properties: + count: + title: "Count" + description: + "How many users should be generated in total. 
The purchases\ + \ table will be scaled to match, with 10 purchases created per 10 users.\ + \ This setting does not apply to the products stream." + type: "integer" + minimum: 1 + default: 1000 + order: 0 + seed: + title: "Seed" + description: + "Manually control the faker random seed to return the same\ + \ values on subsequent runs (leave -1 for random)" + type: "integer" + default: -1 + order: 1 + records_per_slice: + title: "Records Per Stream Slice" + description: + "How many fake records will be in each page (stream slice),\ + \ before a state message is emitted?" + type: "integer" + minimum: 1 + default: 1000 + order: 2 + always_updated: + title: "Always Updated" + description: + "Should the updated_at values for every record be new each\ + \ sync? Setting this to false will case the source to stop emitting records\ + \ after COUNT records have been emitted." + type: "boolean" + default: true + parallelism: + title: "Parallelism" + description: + "How many parallel workers should we use to generate fake data?\ + \ Choose a value equal to the number of CPUs you will allocate to this\ + \ source." + type: "integer" + minimum: 1 + default: 4 + order: 4 + source-lever-hiring: + title: "Lever Hiring Source Spec" + type: "object" + required: + - "start_date" + - "sourceType" + properties: + credentials: + order: 3 + title: "Authentication Mechanism" + description: "Choose how to authenticate to Lever Hiring." + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Lever (OAuth)" + required: + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + - type: "object" + title: "Authenticate via Lever (Api Key)" + required: + - "api_key" + properties: + auth_type: + type: "string" + const: "Api Key" + order: 0 + enum: + - "Api Key" + api_key: + title: "Api key" + type: "string" + description: "The Api Key of your Lever Hiring account." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + start_date: + order: 0 + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. Note that it will be used\ + \ only in the following incremental streams: comments, commits, and issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + environment: + order: 1 + type: "string" + title: "Environment" + description: + "The environment in which you'd like to replicate data for\ + \ Lever. This is used to determine which Lever API endpoint to use." + default: "Sandbox" + enum: + - "Production" + - "Sandbox" + sourceType: + title: "lever-hiring" + const: "lever-hiring" + enum: + - "lever-hiring" + order: 0 + type: "string" + source-lever-hiring-update: + title: "Lever Hiring Source Spec" + type: "object" + required: + - "start_date" + properties: + credentials: + order: 3 + title: "Authentication Mechanism" + description: "Choose how to authenticate to Lever Hiring." + type: "object" + oneOf: + - type: "object" + title: "Authenticate via Lever (OAuth)" + required: + - "refresh_token" + properties: + auth_type: + type: "string" + const: "Client" + order: 0 + enum: + - "Client" + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." 
+ client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "The token for obtaining new access token." + airbyte_secret: true + - type: "object" + title: "Authenticate via Lever (Api Key)" + required: + - "api_key" + properties: + auth_type: + type: "string" + const: "Api Key" + order: 0 + enum: + - "Api Key" + api_key: + title: "Api key" + type: "string" + description: "The Api Key of your Lever Hiring account." + airbyte_secret: true + order: 1 + start_date: + order: 0 + type: "string" + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated. Note that it will be used\ + \ only in the following incremental streams: comments, commits, and issues." + examples: + - "2021-03-01T00:00:00Z" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + environment: + order: 1 + type: "string" + title: "Environment" + description: + "The environment in which you'd like to replicate data for\ + \ Lever. This is used to determine which Lever API endpoint to use." 
+ default: "Sandbox" + enum: + - "Production" + - "Sandbox" + source-braze: + title: "Braze Spec" + type: "object" + required: + - "url" + - "api_key" + - "start_date" + - "sourceType" + properties: + url: + type: "string" + title: "URL" + description: "Braze REST API endpoint" + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Braze REST API key" + x-speakeasy-param-sensitive: true + start_date: + type: "string" + format: "date" + title: "Start date" + description: "Rows after this date will be synced" + sourceType: + title: "braze" + const: "braze" + enum: + - "braze" + order: 0 + type: "string" + source-braze-update: + title: "Braze Spec" + type: "object" + required: + - "url" + - "api_key" + - "start_date" + properties: + url: + type: "string" + title: "URL" + description: "Braze REST API endpoint" + api_key: + type: "string" + title: "Rest API Key" + airbyte_secret: true + description: "Braze REST API key" + start_date: + type: "string" + format: "date" + title: "Start date" + description: "Rows after this date will be synced" + source-sftp: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + - "sourceType" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: "Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: 
+ title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." + type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + sourceType: + title: "sftp" + const: "sftp" + enum: + - "sftp" + order: 0 + type: "string" + source-sftp-update: + title: "SFTP Source Spec" + type: "object" + required: + - "user" + - "host" + - "port" + properties: + user: + title: "User Name" + description: "The server user" + type: "string" + order: 0 + host: + title: "Host Address" + description: "The server host address" + type: "string" + examples: + - "www.host.com" + - "192.0.2.1" + order: 1 + port: + title: "Port" + description: "The server port" + type: "integer" + default: 22 + examples: + - "22" + order: 2 + credentials: + type: "object" + title: 
"Authentication" + description: "The server authentication method" + order: 3 + oneOf: + - title: "Password Authentication" + required: + - "auth_method" + - "auth_user_password" + properties: + auth_method: + description: "Connect through password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + auth_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 1 + - title: "SSH Key Authentication" + required: + - "auth_method" + - "auth_ssh_key" + properties: + auth_method: + description: "Connect through ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + auth_ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 1 + file_types: + title: "File types" + description: + "Coma separated file types. Currently only 'csv' and 'json'\ + \ types are supported." + type: "string" + default: "csv,json" + order: 4 + examples: + - "csv,json" + - "csv" + folder_path: + title: "Folder Path" + description: "The directory to search files for sync" + type: "string" + default: "" + examples: + - "/logs/2022" + order: 5 + file_pattern: + title: "File Pattern" + description: + "The regular expression to specify files for sync in a chosen\ + \ Folder Path" + type: "string" + default: "" + examples: + - "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" + order: 6 + source-google-drive: + title: "Google Drive Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." 
+ type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." 
+ default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." 
+ default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + folder_url: + title: "Folder Url" + description: + "URL for the folder you want to sync. Using individual streams\ + \ and glob patterns, it's possible to only sync a subset of all files\ + \ located in the folder." 
+ examples: + - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn" + order: 0 + pattern: "^https://drive.google.com/.+" + pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID" + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Google Drive API" + type: "object" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + description: "Refresh Token for the Google Drive API" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + service_account_info: + title: "Service Account Information" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." 
+ airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "service_account_info" + - "auth_type" + sourceType: + title: "google-drive" + const: "google-drive" + enum: + - "google-drive" + order: 0 + type: "string" + required: + - "streams" + - "folder_url" + - "credentials" + - "sourceType" + source-google-drive-update: + title: "Google Drive Source Spec" + description: + "Used during spec; allows the developer to configure the cloud\ + \ provider specific options\nthat are needed when users configure a file-based\ + \ source." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." 
+ default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." 
+ default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. 
`User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." 
+ default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." + default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Document File Type Format (Experimental)" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "format" + folder_url: + title: "Folder Url" + description: + "URL for the folder you want to sync. Using individual streams\ + \ and glob patterns, it's possible to only sync a subset of all files\ + \ located in the folder." 
+ examples: + - "https://drive.google.com/drive/folders/1Xaz0vXXXX2enKnNYU5qSt9NS70gvMyYn" + order: 0 + pattern: "^https://drive.google.com/.+" + pattern_descriptor: "https://drive.google.com/drive/folders/MY-FOLDER-ID" + type: "string" + credentials: + title: "Authentication" + description: "Credentials for connecting to the Google Drive API" + type: "object" + oneOf: + - title: "Authenticate via Google (OAuth)" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Client" + const: "Client" + enum: + - "Client" + type: "string" + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + refresh_token: + title: "Refresh Token" + description: "Refresh Token for the Google Drive API" + airbyte_secret: true + type: "string" + required: + - "client_id" + - "client_secret" + - "refresh_token" + - "auth_type" + - title: "Service Account Key Authentication" + type: "object" + properties: + auth_type: + title: "Auth Type" + default: "Service" + const: "Service" + enum: + - "Service" + type: "string" + service_account_info: + title: "Service Account Information" + description: + "The JSON key of the service account to use for authorization.\ + \ Read more here." + airbyte_secret: true + type: "string" + required: + - "service_account_info" + - "auth_type" + required: + - "streams" + - "folder_url" + - "credentials" + source-mailjet-sms: + type: "object" + required: + - "token" + - "sourceType" + properties: + end_date: + type: "integer" + title: "End date" + description: + "Retrieve SMS messages created before the specified timestamp.\ + \ Required format - Unix timestamp." 
+ pattern: "^[0-9]*$" + examples: + - 1666281656 + order: 0 + start_date: + type: "integer" + title: "Start date" + description: + "Retrieve SMS messages created after the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666261656 + order: 1 + token: + type: "string" + title: "Access Token" + description: + "Your access token. See here." + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + sourceType: + title: "mailjet-sms" + const: "mailjet-sms" + enum: + - "mailjet-sms" + order: 0 + type: "string" + source-mailjet-sms-update: + type: "object" + required: + - "token" + properties: + end_date: + type: "integer" + title: "End date" + description: + "Retrieve SMS messages created before the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666281656 + order: 0 + start_date: + type: "integer" + title: "Start date" + description: + "Retrieve SMS messages created after the specified timestamp.\ + \ Required format - Unix timestamp." + pattern: "^[0-9]*$" + examples: + - 1666261656 + order: 1 + token: + type: "string" + title: "Access Token" + description: + "Your access token. See here." 
+ airbyte_secret: true + order: 2 + source-chameleon: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + - "sourceType" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + filter: + type: "string" + description: "Filter for using in the `segments_experiences` stream" + enum: + - "tour" + - "survey" + - "launcher" + order: 3 + title: "Filter" + default: "tour" + end_date: + type: "string" + order: 4 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + sourceType: + title: "chameleon" + const: "chameleon" + enum: + - "chameleon" + order: 0 + type: "string" + source-chameleon-update: + type: "object" + required: + - "api_key" + - "start_date" + - "end_date" + properties: + api_key: + type: "string" + name: "api_key" + order: 0 + title: "API Key" + airbyte_secret: true + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + limit: + type: "string" + description: "Max records per page limit" + order: 2 + title: "Limit" + default: "50" + filter: + type: "string" + description: "Filter for using in the `segments_experiences` stream" + enum: + - "tour" + - "survey" + - "launcher" + order: 3 + title: "Filter" + default: "tour" + end_date: + type: "string" + order: 4 + title: "End date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + source-gcs: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be\nmodified to uptake the 
changes because it is responsible for\ + \ converting\nlegacy GCS configs into file based configs using the File-Based\ + \ CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." 
+ type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." 
+ default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the 
`hi_res`\ + \ mode. This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + service_account: + title: "Service Account Information" + description: + "Enter your Google Cloud service account key in JSON format" + airbyte_secret: true + order: 0 + type: "string" + x-speakeasy-param-sensitive: true + bucket: + title: "Bucket" + description: "Name of the GCS bucket where the file(s) exist." + order: 2 + type: "string" + sourceType: + title: "gcs" + const: "gcs" + enum: + - "gcs" + order: 0 + type: "string" + required: + - "streams" + - "service_account" + - "bucket" + - "sourceType" + source-gcs-update: + title: "Config" + description: + "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be\nmodified to uptake the changes because it is responsible for\ + \ converting\nlegacy GCS configs into file based configs using the File-Based\ + \ CDK." + type: "object" + properties: + start_date: + title: "Start Date" + description: + "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." 
+ examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: + "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + globs: + title: "Globs" + description: + "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + default: + - "**" + order: 1 + type: "array" + items: + type: "string" + validation_policy: + title: "Validation Policy" + description: + "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: + "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: + "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: + "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: + "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + required: + - "filetype" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: + "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: + "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: '"' + type: "string" + escape_char: + title: "Escape Character" + description: + "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: + "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: + "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." 
+ default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: + "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: + "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: + "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + required: + - "header_definition_type" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + required: + - "header_definition_type" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: + "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + - "header_definition_type" + type: "object" + true_values: + title: "True Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: + "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + ignore_errors_on_fields_mismatch: + title: "Ignore errors on field mismatch" + description: + "Whether to ignore errors that occur when the number\ + \ of fields in the CSV does not match the number of columns\ + \ in the schema." 
+ default: false + type: "boolean" + required: + - "filetype" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + required: + - "filetype" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: + "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + required: + - "filetype" + - title: "Unstructured Document Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "unstructured" + const: "unstructured" + type: "string" + enum: + - "unstructured" + skip_unprocessable_files: + title: "Skip Unprocessable Files" + description: + "If true, skip files that cannot be parsed and\ + \ pass the error message along as the _ab_source_file_parse_error\ + \ field. If false, fail the sync." + default: true + always_show: true + type: "boolean" + strategy: + title: "Parsing Strategy" + description: + "The strategy used to parse documents. `fast` extracts\ + \ text directly from the document which doesn't work for all\ + \ files. `ocr_only` is more reliable, but slower. 
`hi_res`\ + \ is the most reliable, but requires an API key and a hosted\ + \ instance of unstructured and can't be used with local mode.\ + \ See the unstructured.io documentation for more details:\ + \ https://unstructured-io.github.io/unstructured/core/partition.html#partition-pdf" + default: "auto" + always_show: true + order: 0 + enum: + - "auto" + - "fast" + - "ocr_only" + - "hi_res" + type: "string" + processing: + title: "Processing" + description: "Processing configuration" + default: + mode: "local" + type: "object" + oneOf: + - title: "Local" + type: "object" + properties: + mode: + title: "Mode" + default: "local" + const: "local" + enum: + - "local" + type: "string" + description: + "Process files locally, supporting `fast` and\ + \ `ocr` modes. This is the default option." + required: + - "mode" + - title: "via API" + type: "object" + properties: + mode: + title: "Mode" + default: "api" + const: "api" + enum: + - "api" + type: "string" + api_key: + title: "API Key" + description: "The API key to use matching the environment" + default: "" + always_show: true + airbyte_secret: true + type: "string" + api_url: + title: "API URL" + description: "The URL of the unstructured API to use" + default: "https://api.unstructured.io" + always_show: true + examples: + - "https://api.unstructured.com" + type: "string" + parameters: + title: "Additional URL Parameters" + description: "List of parameters send to the API" + default: [] + always_show: true + type: "array" + items: + title: "APIParameterConfigModel" + type: "object" + properties: + name: + title: "Parameter name" + description: + "The name of the unstructured API parameter\ + \ to use" + examples: + - "combine_under_n_chars" + - "languages" + type: "string" + value: + title: "Value" + description: "The value of the parameter" + examples: + - "true" + - "hi_res" + type: "string" + required: + - "name" + - "value" + description: + "Process files via an API, using the `hi_res`\ + \ mode. 
This option is useful for increased performance\ + \ and accuracy, but requires an API key and a hosted instance\ + \ of unstructured." + required: + - "mode" + description: + "Extract text from document formats (.pdf, .docx, .md,\ + \ .pptx) and emit as one record per file." + required: + - "filetype" + - title: "Excel Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "excel" + const: "excel" + type: "string" + enum: + - "excel" + required: + - "filetype" + schemaless: + title: "Schemaless" + description: + "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + recent_n_files_to_read_for_schema_discovery: + title: "Files To Read For Schema Discover" + description: + "The number of resent files which will be used to discover\ + \ the schema for this stream." + exclusiveMinimum: 0 + type: "integer" + required: + - "name" + - "format" + service_account: + title: "Service Account Information" + description: + "Enter your Google Cloud service account key in JSON format" + airbyte_secret: true + order: 0 + type: "string" + bucket: + title: "Bucket" + description: "Name of the GCS bucket where the file(s) exist." 
+ order: 2 + type: "string" + required: + - "streams" + - "service_account" + - "bucket" + source-basecamp: + type: "object" + required: + - "account_id" + - "start_date" + - "client_id" + - "client_secret" + - "client_refresh_token_2" + - "sourceType" + properties: + account_id: + type: "number" + order: 0 + title: "Account ID" + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_id: + type: "string" + title: "Client ID" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client secret" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + client_refresh_token_2: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + sourceType: + title: "basecamp" + const: "basecamp" + enum: + - "basecamp" + order: 0 + type: "string" + source-basecamp-update: + type: "object" + required: + - "account_id" + - "start_date" + - "client_id" + - "client_secret" + - "client_refresh_token_2" + properties: + account_id: + type: "number" + order: 0 + title: "Account ID" + start_date: + type: "string" + order: 1 + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + client_id: + type: "string" + title: "Client ID" + airbyte_secret: true + order: 2 + client_secret: + type: "string" + title: "Client secret" + airbyte_secret: true + order: 3 + client_refresh_token_2: + type: "string" + title: "Refresh token" + airbyte_secret: true + order: 4 + source-qualaroo: + title: "Qualaroo Spec" + type: "object" + required: + - "token" + - "key" + - "start_date" + - "sourceType" + properties: + token: + type: "string" + title: "API token" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." 
+ airbyte_secret: true + x-speakeasy-param-sensitive: true + key: + type: "string" + title: "API key" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00.000Z" + survey_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all surveys to which you have access will be\ + \ replicated." + sourceType: + title: "qualaroo" + const: "qualaroo" + enum: + - "qualaroo" + order: 0 + type: "string" + source-qualaroo-update: + title: "Qualaroo Spec" + type: "object" + required: + - "token" + - "key" + - "start_date" + properties: + token: + type: "string" + title: "API token" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + key: + type: "string" + title: "API key" + description: + "A Qualaroo token. See the docs for instructions on how to generate it." + airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z$" + description: + "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2021-03-01T00:00:00.000Z" + survey_ids: + type: "array" + items: + type: "string" + pattern: "^[0-9]{1,8}$" + title: "Qualaroo survey IDs" + description: + "IDs of the surveys from which you'd like to replicate data.\ + \ If left empty, data from all surveys to which you have access will be\ + \ replicated." 
+ source-nytimes: + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + sourceType: + title: "nytimes" + const: "nytimes" + enum: + - "nytimes" + order: 0 + type: "string" + source-nytimes-update: + title: "Nytimes Spec" + type: "object" + required: + - "api_key" + - "start_date" + - "period" + properties: + api_key: + type: "string" + title: "API Key" + description: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start Date" + description: "Start date to begin the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 1 + end_date: + type: "string" + title: "End Date" + description: "End date to stop the article retrieval (format YYYY-MM)" + pattern: "^[0-9]{4}-[0-9]{2}$" + examples: + - "2022-08" + - "1851-01" + order: 2 + period: + type: "integer" + title: "Period (used for Most Popular streams)" + description: "Period of time (in days)" + order: 3 + enum: + - 1 + - 7 + - 30 + share_type: + type: "string" + title: "Share Type (used 
for Most Popular Shared stream)" + description: "Share Type" + order: 4 + enum: + - "facebook" + source-greenhouse: + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + - "sourceType" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + sourceType: + title: "greenhouse" + const: "greenhouse" + enum: + - "greenhouse" + order: 0 + type: "string" + source-greenhouse-update: + title: "Greenhouse Spec" + type: "object" + required: + - "api_key" + properties: + api_key: + title: "API Key" + type: "string" + description: + "Greenhouse API Key. See the docs for more information on how to generate this key." + airbyte_secret: true + order: 0 + source-front: + type: "object" + required: + - "api_key" + - "start_date" + - "sourceType" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + x-speakeasy-param-sensitive: true + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + page_limit: + type: "string" + description: "Page limit for the responses" + title: "Page limit" + default: "50" + order: 2 + sourceType: + title: "front" + const: "front" + enum: + - "front" + order: 0 + type: "string" + source-front-update: + type: "object" + required: + - "api_key" + - "start_date" + properties: + api_key: + type: "string" + title: "API Key" + airbyte_secret: true + order: 0 + start_date: + type: "string" + title: "Start date" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + order: 1 + page_limit: + type: "string" + description: "Page limit for the responses" + title: "Page limit" + default: "50" + order: 2 + trello: + title: null + zendesk-chat: + properties: + credentials: + properties: + client_id: 
+ type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Zendesk Chat Spec" + google-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + order: 1 + description: + "The Client ID of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + client_secret: + type: "string" + title: "Client Secret" + order: 2 + description: + "The Client Secret of your Google Ads developer application.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + developer_token: + type: "string" + title: "Developer Token" + order: 0 + description: + "The Developer Token granted by Google to use their APIs.\ + \ For detailed instructions on finding this value, refer to our documentation." + airbyte_secret: true + title: "Google Ads Spec" + google-search-console: + properties: + authorization: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The client ID of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The client secret of your Google Search Console developer\ + \ application. Read more here." + airbyte_secret: true + title: "Google Search Console Spec" + shopify: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the Shopify developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the Shopify developer application." 
+ airbyte_secret: true + order: 2 + title: "Shopify Source CDK Specifications" + retently: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Retently developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Retently developer application." + airbyte_secret: true + title: "Retently Api Spec" + instagram: + properties: + client_id: + title: "Client Id" + description: "The Client ID for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your Oauth application" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Instagram" + azure-blob-storage: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "SourceAzureBlobStorageSpec" + zendesk-sunshine: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: null + snapchat-marketing: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Snapchat developer application." + airbyte_secret: true + order: 0 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Snapchat developer application." 
+ airbyte_secret: true + order: 1 + title: "Snapchat Marketing Spec" + gitlab: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the Gitlab developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the Gitlab developer application." + airbyte_secret: true + title: "Source Gitlab Spec" + snowflake: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Snowflake developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your Snowflake developer application." + airbyte_secret: true + order: 2 + title: "Snowflake Source Spec" + microsoft-sharepoint: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft SharePoint Source Spec" + smartsheets: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The API ID of the SmartSheets developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The API Secret the SmartSheets developer application." + airbyte_secret: true + title: "Smartsheets Source Spec" + notion: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "The Client ID of your Notion integration. See our docs\ + \ for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "The Client Secret of your Notion integration. See our\ + \ docs\ + \ for more information." 
+ airbyte_secret: true + title: "Notion Source Spec" + slack: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "Slack client_id. See our docs if you need help finding this id." + client_secret: + type: "string" + title: "Client Secret" + description: + "Slack client_secret. See our docs if you need help finding this secret." + airbyte_secret: true + title: "Slack Spec" + youtube-analytics: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your developer application" + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The client secret of your developer application" + airbyte_secret: true + title: "YouTube Analytics Spec" + google-sheets: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: + "Enter your Google application's Client ID. See Google's\ + \ documentation for more information." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: + "Enter your Google application's Client Secret. See Google's\ + \ documentation for more information." 
+ airbyte_secret: true + title: "Google Sheets Source Spec" + zendesk-talk: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Client Secret" + airbyte_secret: true + title: "Source Zendesk Talk Spec" + asana: + properties: + credentials: + properties: + client_id: + type: "string" + title: "" + description: "" + airbyte_secret: true + client_secret: + type: "string" + title: "" + description: "" + airbyte_secret: true + title: "Asana Spec" + microsoft-teams: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Microsoft Teams developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Microsoft Teams developer application." + airbyte_secret: true + title: "Microsoft Teams Spec" + amazon-seller-partner: + properties: + lwa_app_id: + title: "LWA Client Id" + description: "Your Login with Amazon Client ID." + order: 4 + airbyte_secret: true + type: "string" + lwa_client_secret: + title: "LWA Client Secret" + description: "Your Login with Amazon Client Secret." + airbyte_secret: true + order: 5 + type: "string" + title: "Amazon Seller Partner Spec" + linkedin-ads: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." 
+ airbyte_secret: true + title: "Linkedin Ads Spec" + pinterest: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Pinterest Spec" + zendesk-support: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: + "The OAuth client's ID. See this guide for more information." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: + "The OAuth client secret. See this guide for more information." + airbyte_secret: true + title: "Source Zendesk Support Spec" + microsoft-onedrive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID of your Microsoft developer application" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret of your Microsoft developer application" + airbyte_secret: true + type: "string" + title: "Microsoft OneDrive Source Spec" + tiktok-marketing: + properties: + credentials: + properties: + app_id: + title: "App ID" + description: "The Developer Application App ID." + airbyte_secret: true + type: "string" + secret: + title: "Secret" + description: "The Developer Application Secret." + airbyte_secret: true + type: "string" + title: "TikTok Marketing Source Spec" + hubspot: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: + "The Client ID of your HubSpot developer application. See\ + \ the Hubspot docs if you need help finding this ID." 
+ type: "string" + examples: + - "123456789000" + client_secret: + title: "Client Secret" + description: + "The client secret for your HubSpot developer application.\ + \ See the Hubspot docs if you need help finding this secret." + type: "string" + examples: + - "secret" + airbyte_secret: true + title: "HubSpot Source Spec" + google-analytics-data-api: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Analytics developer application." + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Analytics developer application." + airbyte_secret: true + order: 2 + title: "Google Analytics (Data API) Spec" + intercom: + properties: + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + order: 1 + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true + order: 2 + title: "Source Intercom Spec" + typeform: + properties: + credentials: + properties: + client_id: + type: "string" + description: "The Client ID of the Typeform developer application." + airbyte_secret: true + client_secret: + type: "string" + description: "The Client Secret the Typeform developer application." 
+ airbyte_secret: true + title: null + facebook-marketing: + properties: + credentials: + properties: + client_id: + title: "Client Id" + description: "The Client Id for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + client_secret: + title: "Client Secret" + description: "The Client Secret for your OAuth app" + airbyte_secret: true + airbyte_hidden: true + type: "string" + title: "Source Facebook Marketing" + surveymonkey: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of the SurveyMonkey developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of the SurveyMonkey developer application." + airbyte_secret: true + order: 2 + title: null + bing-ads: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your Microsoft Advertising developer application." + airbyte_secret: true + order: 1 + client_secret: + type: "string" + title: "Client Secret" + description: + "The Client Secret of your Microsoft Advertising developer\ + \ application." + default: "" + airbyte_secret: true + order: 2 + title: "Bing Ads Spec" + monday: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Monday Spec" + amazon-ads: + properties: + client_id: + title: "Client ID" + description: + "The client ID of your Amazon Ads developer application. See\ + \ the docs for more information." 
+ order: 1 + type: "string" + airbyte_secret: true + client_secret: + title: "Client Secret" + description: + "The client secret of your Amazon Ads developer application.\ + \ See the docs for more information." + airbyte_secret: true + order: 2 + type: "string" + title: "Amazon Ads Spec" + github: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client Id" + description: "OAuth Client Id" + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "OAuth Client secret" + airbyte_secret: true + title: "GitHub Source Spec" + square: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The Square-issued ID of your application" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Square-issued application secret for your application" + airbyte_secret: true + title: "Square Spec" + mailchimp: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + title: "Mailchimp Spec" + airtable: + properties: + credentials: + properties: + client_id: + type: "string" + title: "Client ID" + description: "The client ID of the Airtable developer application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client secret" + description: "The client secret the Airtable developer application." 
+ airbyte_secret: true + title: "Airtable Source Spec" + salesforce: + properties: + client_id: + title: "Client ID" + description: + "Enter your Salesforce developer application's Client ID" + type: "string" + order: 2 + client_secret: + title: "Client Secret" + description: + "Enter your Salesforce developer application's Client secret" + type: "string" + airbyte_secret: true + order: 3 + title: "Salesforce Source Spec" + lever-hiring: + properties: + credentials: + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Lever Hiring developer application." + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Lever Hiring developer application." + airbyte_secret: true + title: "Lever Hiring Source Spec" + google-drive: + properties: + credentials: + properties: + client_id: + title: "Client ID" + description: "Client ID for the Google Drive API" + airbyte_secret: true + type: "string" + client_secret: + title: "Client Secret" + description: "Client Secret for the Google Drive API" + airbyte_secret: true + type: "string" + title: "Google Drive Source Spec" + destination-gcs: + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + - "format" + - "destinationType" + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + order: 1 + type: "string" + description: + "You can find the bucket name in the App Engine Admin console\ + \ Application Settings page, under the label Google Cloud Storage Bucket.\ + \ Read more here." + examples: + - "airbyte_sync" + gcs_bucket_path: + title: "GCS Bucket Path" + description: + "GCS Bucket Path string Subdirectory under the above bucket\ + \ to sync the data into." + order: 2 + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + order: 3 + default: "us" + description: + "Select a Region of the GCS Bucket. 
Read more here." + enum: + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "southamerica-east1" + - "southamerica-west1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "asia" + - "eu" + - "us" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Authentication" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read more\ + \ here." + type: "object" + order: 0 + oneOf: + - title: "HMAC Key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "When linked to a service account, this ID is 61 characters\ + \ long; when linked to a user account, it is 24 characters long.\ + \ Read more here." + title: "Access ID" + airbyte_secret: true + order: 0 + examples: + - "1234567890abcdefghij1234" + x-speakeasy-param-sensitive: true + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string. Read more here." + title: "Secret" + airbyte_secret: true + order: 1 + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + x-speakeasy-param-sensitive: true + format: + title: "Output Format" + type: "object" + description: + "Output data format. One of the following formats must be selected\ + \ - AVRO format, PARQUET format, CSV format, or JSONL format." 
+ order: 4 + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "The presets 0-3 are fast presets with medium compression.\ + \ The presets 4-6 are fairly slow presets with high compression.\ + \ The default preset is 6. The presets 7-9 are like the preset\ + \ 6 but use bigger dictionaries and have higher compressor\ + \ and decompressor memory requirements. Unless the uncompressed\ + \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\ + \ waste of memory to use the presets 7, 8, or 9, respectively.\ + \ Read more here for details." 
+ type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization" + description: + "Whether the input JSON data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." 
+ oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + default: "UNCOMPRESSED" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." 
+ type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + destinationType: + title: "gcs" + const: "gcs" + enum: + - "gcs" + order: 0 + type: "string" + destination-gcs-update: + title: "GCS Destination Spec" + type: "object" + required: + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + - "format" + properties: + gcs_bucket_name: + title: "GCS Bucket Name" + order: 1 + type: "string" + description: + "You can find the bucket name in the App Engine Admin console\ + \ Application Settings page, under the label Google Cloud Storage Bucket.\ + \ Read more here." + examples: + - "airbyte_sync" + gcs_bucket_path: + title: "GCS Bucket Path" + description: + "GCS Bucket Path string Subdirectory under the above bucket\ + \ to sync the data into." + order: 2 + type: "string" + examples: + - "data_sync/test" + gcs_bucket_region: + title: "GCS Bucket Region" + type: "string" + order: 3 + default: "us" + description: + "Select a Region of the GCS Bucket. Read more here." 
+ enum: + - "northamerica-northeast1" + - "northamerica-northeast2" + - "us-central1" + - "us-east1" + - "us-east4" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + - "southamerica-east1" + - "southamerica-west1" + - "europe-central2" + - "europe-north1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "asia" + - "eu" + - "us" + - "asia1" + - "eur4" + - "nam4" + credential: + title: "Authentication" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read more\ + \ here." + type: "object" + order: 0 + oneOf: + - title: "HMAC Key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + enum: + - "HMAC_KEY" + default: "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "When linked to a service account, this ID is 61 characters\ + \ long; when linked to a user account, it is 24 characters long.\ + \ Read more here." + title: "Access ID" + airbyte_secret: true + order: 0 + examples: + - "1234567890abcdefghij1234" + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It is a\ + \ 40-character base-64 encoded string. Read more here." + title: "Secret" + airbyte_secret: true + order: 1 + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + format: + title: "Output Format" + type: "object" + description: + "Output data format. One of the following formats must be selected\ + \ - AVRO format, PARQUET format, CSV format, or JSONL format." 
+ order: 4 + oneOf: + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + type: "string" + enum: + - "Avro" + default: "Avro" + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "The presets 0-3 are fast presets with medium compression.\ + \ The presets 4-6 are fairly slow presets with high compression.\ + \ The default preset is 6. The presets 7-9 are like the preset\ + \ 6 but use bigger dictionaries and have higher compressor\ + \ and decompressor memory requirements. Unless the uncompressed\ + \ size of the file exceeds 8 MiB, 16 MiB, or 32 MiB, it is\ + \ waste of memory to use the presets 7, 8, or 9, respectively.\ + \ Read more here for details." 
+ type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Normalization" + description: + "Whether the input JSON data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." 
+ oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + default: "UNCOMPRESSED" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." 
+ type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + destination-clickhouse: + title: "ClickHouse Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "HTTP port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: false + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "clickhouse" + const: "clickhouse" + enum: + - "clickhouse" + order: 0 + type: "string" + destination-clickhouse-update: + title: "ClickHouse Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "HTTP port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 8123 + examples: + - "8123" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: false + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-mssql: + title: "MS SQL Server Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "The host name of the MSSQL database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the MSSQL database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1433 + examples: + - "1433" + order: 1 + database: + title: "DB Name" + description: "The name of the MSSQL database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with this username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used to communicate with the\ + \ database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "The data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + destination-mssql-update: + title: "MS SQL Server Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "The host name of the MSSQL database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the MSSQL database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1433 + examples: + - "1433" + order: 1 + database: + title: "DB Name" + description: "The name of the MSSQL database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "The username which is used to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "The password associated with this username." 
+ type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: + "The encryption method which is used to communicate with the\ + \ database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "The data transfer will not be encrypted." + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Encrypted (trust server certificate)" + description: + "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + default: "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + type: "object" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: + "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." 
+ order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-mysql: + title: "MySQL Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." 
+ type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 6 + raw_data_schema: + type: "string" + description: "The database to write raw tables into" + title: "Raw table database (defaults to airbyte_internal)" + order: 7 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mysql" + const: "mysql" + enum: + - "mysql" + order: 0 + type: "string" + destination-mysql-update: + title: "MySQL Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 3306 + examples: + - "3306" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + description: "Encrypt data using SSL." + type: "boolean" + default: true + order: 5 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 6 + raw_data_schema: + type: "string" + description: "The database to write raw tables into" + title: "Raw table database (defaults to airbyte_internal)" + order: 7 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-pubsub: + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + - "ordering_enabled" + - "batching_enabled" + - "destinationType" + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub." + title: "Project ID" + topic_id: + type: "string" + description: "The PubSub topic ID in the given GCP project ID." + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. 
Check out the\ + \ docs if you need help generating this key." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + ordering_enabled: + title: "Message Ordering Enabled" + description: + "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\ + \ of stream" + type: "boolean" + default: false + batching_enabled: + type: "boolean" + title: "Message Batching Enabled" + description: + "If TRUE messages will be buffered instead of sending them\ + \ one by one" + default: false + batching_delay_threshold: + type: "integer" + title: "Message Batching: Delay Threshold" + description: "Number of ms before the buffer is flushed" + default: 1 + minimum: 1 + batching_element_count_threshold: + type: "integer" + title: "Message Batching: Element Count Threshold" + description: "Number of messages before the buffer is flushed" + default: 1 + minimum: 1 + batching_request_bytes_threshold: + type: "integer" + title: "Message Batching: Request Bytes Threshold" + description: "Number of bytes before the buffer is flushed" + default: 1 + minimum: 1 + destinationType: + title: "pubsub" + const: "pubsub" + enum: + - "pubsub" + order: 0 + type: "string" + destination-pubsub-update: + title: "Google PubSub Destination Spec" + type: "object" + required: + - "project_id" + - "topic_id" + - "credentials_json" + - "ordering_enabled" + - "batching_enabled" + properties: + project_id: + type: "string" + description: "The GCP project ID for the project containing the target PubSub." + title: "Project ID" + topic_id: + type: "string" + description: "The PubSub topic ID in the given GCP project ID." + title: "PubSub Topic ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key." 
+ title: "Credentials JSON" + airbyte_secret: true + ordering_enabled: + title: "Message Ordering Enabled" + description: + "If TRUE PubSub publisher will have message ordering enabled. Every message will have an ordering key\ + \ of stream" + type: "boolean" + default: false + batching_enabled: + type: "boolean" + title: "Message Batching Enabled" + description: + "If TRUE messages will be buffered instead of sending them\ + \ one by one" + default: false + batching_delay_threshold: + type: "integer" + title: "Message Batching: Delay Threshold" + description: "Number of ms before the buffer is flushed" + default: 1 + minimum: 1 + batching_element_count_threshold: + type: "integer" + title: "Message Batching: Element Count Threshold" + description: "Number of messages before the buffer is flushed" + default: 1 + minimum: 1 + batching_request_bytes_threshold: + type: "integer" + title: "Message Batching: Request Bytes Threshold" + description: "Number of bytes before the buffer is flushed" + default: 1 + minimum: 1 + destination-weaviate: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "No external embedding" + type: "object" + properties: + mode: + title: "Mode" + default: "no_embedding" + const: "no_embedding" + enum: + - "no_embedding" + type: "string" + description: + "Do not calculate and pass embeddings to Weaviate. Suitable\ + \ for clusters with configured vectorizers to calculate embeddings within\ + \ Weaviate or for classes that should only support regular text search." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + - "mode" + description: + "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Weaviate cluster." 
+ order: 1 + examples: + - "https://my-cluster.weaviate.network" + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Weaviate instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Weaviate\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Weaviate cluster" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Weaviate cluster" + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Weaviate clusters)" + - title: "No Authentication" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + batch_size: + title: "Batch Size" + description: "The number of records to send to Weaviate in each batch" + default: 128 + type: "integer" + text_field: + title: "Text Field" + description: "The field in the object that contains the embedded text" + default: "text" + type: "string" + tenant_id: + title: "Tenant ID" + description: "The tenant ID to use for multi tenancy" + default: "" + airbyte_secret: 
true + type: "string" + x-speakeasy-param-sensitive: true + default_vectorizer: + title: "Default Vectorizer" + description: "The vectorizer to use if new classes need to be created" + default: "none" + enum: + - "none" + - "text2vec-cohere" + - "text2vec-huggingface" + - "text2vec-openai" + - "text2vec-palm" + - "text2vec-contextionary" + - "text2vec-transformers" + - "text2vec-gpt4all" + type: "string" + additional_headers: + title: "Additional headers" + description: "Additional HTTP headers to send with every request." + default: [] + examples: + - header_key: "X-OpenAI-Api-Key" + value: "my-openai-api-key" + type: "array" + items: + title: "Header" + type: "object" + properties: + header_key: + title: "Header Key" + type: "string" + value: + title: "Header Value" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "header_key" + - "value" + required: + - "host" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "weaviate" + const: "weaviate" + enum: + - "weaviate" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-weaviate-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ 
+ \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "No external embedding" + type: "object" + properties: + mode: + title: "Mode" + default: "no_embedding" + const: "no_embedding" + enum: + - "no_embedding" + type: "string" + description: + "Do not calculate and pass embeddings to Weaviate. Suitable\ + \ for clusters with configured vectorizers to calculate embeddings within\ + \ Weaviate or for classes that should only support regular text search." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + - "mode" + description: + "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Weaviate cluster." 
+ order: 1 + examples: + - "https://my-cluster.weaviate.network" + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Weaviate instance" + airbyte_secret: true + type: "string" + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Weaviate\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Weaviate cluster" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Weaviate cluster" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Weaviate clusters)" + - title: "No Authentication" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + batch_size: + title: "Batch Size" + description: "The number of records to send to Weaviate in each batch" + default: 128 + type: "integer" + text_field: + title: "Text Field" + description: "The field in the object that contains the embedded text" + default: "text" + type: "string" + tenant_id: + title: "Tenant ID" + description: "The tenant ID to use for multi tenancy" + default: "" + airbyte_secret: true + type: "string" + default_vectorizer: + title: "Default Vectorizer" 
+ description: "The vectorizer to use if new classes need to be created" + default: "none" + enum: + - "none" + - "text2vec-cohere" + - "text2vec-huggingface" + - "text2vec-openai" + - "text2vec-palm" + - "text2vec-contextionary" + - "text2vec-transformers" + - "text2vec-gpt4all" + type: "string" + additional_headers: + title: "Additional headers" + description: "Additional HTTP headers to send with every request." + default: [] + examples: + - header_key: "X-OpenAI-Api-Key" + value: "my-openai-api-key" + type: "array" + items: + title: "Header" + type: "object" + properties: + header_key: + title: "Header Key" + type: "string" + value: + title: "Header Value" + airbyte_secret: true + type: "string" + required: + - "header_key" + - "value" + required: + - "host" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-mongodb: + title: "MongoDB Destination Spec" + type: "object" + required: + - "database" + - "auth_type" + - "destinationType" + properties: + instance_type: + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + \ Set TLS connection is used by default." + title: "MongoDb Instance Type" + type: "object" + order: 0 + oneOf: + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "The Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "The Port of a Mongo database to be replicated." 
+ minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS Connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member separated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + auth_type: + title: "Authorization type" + type: "object" + description: "Authorization type." + oneOf: + - title: "None" + description: "None." + required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + enum: + - "none" + - title: "Login/Password" + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + enum: + - "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username."
+ type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "mongodb" + const: "mongodb" + enum: + - "mongodb" + order: 0 + type: "string" + destination-mongodb-update: + title: "MongoDB Destination Spec" + type: "object" + required: + - "database" + - "auth_type" + properties: + instance_type: + description: + "MongoDb instance to connect to. For MongoDB Atlas and Replica\ + \ Set TLS connection is used by default." 
+ title: "MongoDb Instance Type" + type: "object" + order: 0 + oneOf: + - title: "Standalone MongoDb Instance" + required: + - "instance" + - "host" + - "port" + properties: + instance: + type: "string" + enum: + - "standalone" + default: "standalone" + host: + title: "Host" + type: "string" + description: "The Host of a Mongo database to be replicated." + order: 0 + port: + title: "Port" + type: "integer" + description: "The Port of a Mongo database to be replicated." + minimum: 0 + maximum: 65536 + default: 27017 + examples: + - "27017" + order: 1 + tls: + title: "TLS Connection" + type: "boolean" + description: + "Indicates whether TLS encryption protocol will be used\ + \ to connect to MongoDB. It is recommended to use TLS connection\ + \ if possible. For more information see documentation." + default: false + order: 2 + - title: "Replica Set" + required: + - "instance" + - "server_addresses" + properties: + instance: + type: "string" + enum: + - "replica" + default: "replica" + server_addresses: + title: "Server addresses" + type: "string" + description: + "The members of a replica set. Please specify `host`:`port`\ + \ of each member separated by comma." + examples: + - "host1:27017,host2:27017,host3:27017" + order: 0 + replica_set: + title: "Replica Set" + type: "string" + description: "A replica set name." + order: 1 + - title: "MongoDB Atlas" + required: + - "instance" + - "cluster_url" + properties: + instance: + type: "string" + enum: + - "atlas" + default: "atlas" + cluster_url: + title: "Cluster URL" + type: "string" + description: "URL of a cluster to connect to." + order: 0 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + auth_type: + title: "Authorization type" + type: "object" + description: "Authorization type." + oneOf: + - title: "None" + description: "None."
+ required: + - "authorization" + type: "object" + properties: + authorization: + type: "string" + const: "none" + enum: + - "none" + - title: "Login/Password" + description: "Login/Password." + required: + - "authorization" + - "username" + - "password" + type: "object" + properties: + authorization: + type: "string" + const: "login/password" + enum: + - "login/password" + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-vectara: + title: "Vectara Config" + type: "object" + properties: + oauth2: + title: "OAuth2.0 Credentials" + type: "object" + properties: + client_id: + title: "OAuth Client ID" + description: "OAuth2.0 client id" + order: 0 + type: "string" + client_secret: + title: "OAuth Client Secret" + description: "OAuth2.0 client secret" + airbyte_secret: true + order: 1 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "client_id" + - "client_secret" + description: + "OAuth2.0 credentials used to authenticate admin actions (creating/deleting\ + \ corpora)" + group: "auth" + customer_id: + title: 
"Customer ID" + description: "Your customer id as it is in the authentication url" + order: 2 + group: "account" + type: "string" + corpus_name: + title: "Corpus Name" + description: "The Name of Corpus to load data into" + order: 3 + group: "account" + type: "string" + parallelize: + title: "Parallelize" + description: "Parallelize indexing into Vectara with multiple threads" + default: false + always_show: true + group: "account" + type: "boolean" + text_fields: + title: "Text fields to index with Vectara" + description: + "List of fields in the record that should be in the section\ + \ of the document. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all fields\ + \ are considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + title_field: + title: "Text field to use as document title with Vectara" + description: + "A field that will be used to populate the `title` of each\ + \ document. The field list is applied to all streams in the same way and\ + \ non-existing fields are ignored. If none are defined, all fields are\ + \ considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array."
+ default: "" + always_show: true + examples: + - "document_key" + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as metadata.\ + \ The field list is applied to all streams in the same way and non-existing\ + \ fields are ignored. If none are defined, all fields are considered metadata\ + \ fields. When specifying text fields, you can access nested fields in\ + \ the record by using dot notation, e.g. `user.name` will access the `name`\ + \ field in the `user` object. It's also possible to use wildcards to access\ + \ all fields in an object, e.g. `users.*.name` will access all `names`\ + \ fields in all entries of the `users` array. When specifying nested paths,\ + \ all matching values are flattened into an array set to a field named\ + \ by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + type: "array" + items: + type: "string" + destinationType: + title: "vectara" + const: "vectara" + enum: + - "vectara" + order: 0 + type: "string" + required: + - "oauth2" + - "customer_id" + - "corpus_name" + - "destinationType" + description: "Configuration to connect to the Vectara instance" + groups: + - id: "account" + title: "Account" + - id: "auth" + title: "Authentication" + destination-vectara-update: + title: "Vectara Config" + type: "object" + properties: + oauth2: + title: "OAuth2.0 Credentials" + type: "object" + properties: + client_id: + title: "OAuth Client ID" + description: "OAuth2.0 client id" + order: 0 + type: "string" + client_secret: + title: "OAuth Client Secret" + description: "OAuth2.0 client secret" + airbyte_secret: true + order: 1 + type: "string" + required: + - "client_id" + - "client_secret" + description: + "OAuth2.0 credentials used to authenticate admin actions (creating/deleting\ + \ corpora)" + group: "auth" + customer_id: + title: "Customer ID" + description: "Your customer id as it is in the authentication url" + 
order: 2 + group: "account" + type: "string" + corpus_name: + title: "Corpus Name" + description: "The Name of Corpus to load data into" + order: 3 + group: "account" + type: "string" + parallelize: + title: "Parallelize" + description: "Parallelize indexing into Vectara with multiple threads" + default: false + always_show: true + group: "account" + type: "boolean" + text_fields: + title: "Text fields to index with Vectara" + description: + "List of fields in the record that should be in the section\ + \ of the document. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all fields\ + \ are considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + title_field: + title: "Text field to use as document title with Vectara" + description: + "A field that will be used to populate the `title` of each\ + \ document. The field list is applied to all streams in the same way and\ + \ non-existing fields are ignored. If none are defined, all fields are\ + \ considered text fields. When specifying text fields, you can access\ + \ nested fields in the record by using dot notation, e.g. `user.name`\ + \ will access the `name` field in the `user` object. It's also possible\ + \ to use wildcards to access all fields in an object, e.g. `users.*.name`\ + \ will access all `names` fields in all entries of the `users` array." 
+ default: "" + always_show: true + examples: + - "document_key" + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as metadata.\ + \ The field list is applied to all streams in the same way and non-existing\ + \ fields are ignored. If none are defined, all fields are considered metadata\ + \ fields. When specifying text fields, you can access nested fields in\ + \ the record by using dot notation, e.g. `user.name` will access the `name`\ + \ field in the `user` object. It's also possible to use wildcards to access\ + \ all fields in an object, e.g. `users.*.name` will access all `names`\ + \ fields in all entries of the `users` array. When specifying nested paths,\ + \ all matching values are flattened into an array set to a field named\ + \ by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + type: "array" + items: + type: "string" + required: + - "oauth2" + - "customer_id" + - "corpus_name" + description: "Configuration to connect to the Vectara instance" + groups: + - id: "account" + title: "Account" + - id: "auth" + title: "Authentication" + destination-s3-glue: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + - "destinationType" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 4 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." 
+ oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "Root level flattening" + enum: + - "No flattening" + - "Root level flattening" + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: + "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: + "The library that your query engine will use for reading and\ + \ writing data in your lake." 
+ type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + destinationType: + title: "s3-glue" + const: "s3-glue" + enum: + - "s3-glue" + order: 0 + type: "string" + destination-s3-glue-update: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "glue_database" + - "glue_serialization_library" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 3 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 4 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "Root level flattening" + enum: + - "No flattening" + - "Root level flattening" + order: 5 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + order: 6 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 7 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 8 + glue_database: + type: "string" + description: + "Name of the glue database for creating the tables, leave blank\ + \ if no integration" + title: "Glue database name" + examples: + - "airbyte_database" + order: 9 + glue_serialization_library: + title: "Serialization Library" + description: + "The library that your query engine will use for reading and\ + \ writing data in your lake." + type: "string" + enum: + - "org.openx.data.jsonserde.JsonSerDe" + - "org.apache.hive.hcatalog.data.JsonSerDe" + default: "org.openx.data.jsonserde.JsonSerDe" + order: 10 + destination-dev-null: + title: "E2E Test Destination Spec" + type: "object" + required: + - "test_destination" + - "destinationType" + properties: + test_destination: + title: "Test Destination" + type: "object" + description: "The type of destination to be used" + oneOf: + - title: "Logging" + required: + - "test_destination_type" + - "logging_config" + properties: + test_destination_type: + type: "string" + const: "LOGGING" + default: "LOGGING" + enum: + - "LOGGING" + logging_config: + title: "Logging Configuration" + type: "object" + description: "Configure how the messages are logged." + oneOf: + - title: "First N Entries" + description: "Log first N entries per stream."
+ type: "object" + required: + - "logging_type" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "FirstN" + default: "FirstN" + max_entry_count: + title: "N" + description: + "Number of entries to log. This destination is\ + \ for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Every N-th Entry" + description: + "For each stream, log every N-th entry with a maximum\ + \ cap." + type: "object" + required: + - "logging_type" + - "nth_entry_to_log" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "EveryNth" + default: "EveryNth" + nth_entry_to_log: + title: "N" + description: + "The N-th entry to log for each stream. N starts\ + \ from 1. For example, when N = 1, every entry is logged;\ + \ when N = 2, every other entry is logged; when N = 3, one\ + \ out of three entries is logged." + type: "number" + example: + - 3 + minimum: 1 + maximum: 1000 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Random Sampling" + description: + "For each stream, randomly log a percentage of the\ + \ entries with a maximum cap." + type: "object" + required: + - "logging_type" + - "sampling_ratio" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "RandomSampling" + default: "RandomSampling" + sampling_ratio: + title: "Sampling Ratio" + description: "A positive floating number smaller than 1." 
+ type: "number" + default: 0.001 + examples: + - 0.001 + minimum: 0 + maximum: 1 + seed: + title: "Random Number Generator Seed" + description: + "When the seed is unspecified, the current time\ + \ millis will be used as the seed." + type: "number" + examples: + - 1900 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Silent" + required: + - "test_destination_type" + properties: + test_destination_type: + type: "string" + const: "SILENT" + default: "SILENT" + enum: + - "SILENT" + - title: "Throttled" + required: + - "test_destination_type" + - "millis_per_record" + properties: + test_destination_type: + type: "string" + const: "THROTTLED" + default: "THROTTLED" + enum: + - "THROTTLED" + millis_per_record: + description: "Number of milli-second to pause in between records." + type: "integer" + - title: "Failing" + required: + - "test_destination_type" + - "num_messages" + properties: + test_destination_type: + type: "string" + const: "FAILING" + default: "FAILING" + enum: + - "FAILING" + num_messages: + description: "Number of messages after which to fail." 
+ type: "integer" + destinationType: + title: "dev-null" + const: "dev-null" + enum: + - "dev-null" + order: 0 + type: "string" + destination-dev-null-update: + title: "E2E Test Destination Spec" + type: "object" + required: + - "test_destination" + properties: + test_destination: + title: "Test Destination" + type: "object" + description: "The type of destination to be used" + oneOf: + - title: "Logging" + required: + - "test_destination_type" + - "logging_config" + properties: + test_destination_type: + type: "string" + const: "LOGGING" + default: "LOGGING" + enum: + - "LOGGING" + logging_config: + title: "Logging Configuration" + type: "object" + description: "Configure how the messages are logged." + oneOf: + - title: "First N Entries" + description: "Log first N entries per stream." + type: "object" + required: + - "logging_type" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "FirstN" + default: "FirstN" + max_entry_count: + title: "N" + description: + "Number of entries to log. This destination is\ + \ for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Every N-th Entry" + description: + "For each stream, log every N-th entry with a maximum\ + \ cap." + type: "object" + required: + - "logging_type" + - "nth_entry_to_log" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "EveryNth" + default: "EveryNth" + nth_entry_to_log: + title: "N" + description: + "The N-th entry to log for each stream. N starts\ + \ from 1. For example, when N = 1, every entry is logged;\ + \ when N = 2, every other entry is logged; when N = 3, one\ + \ out of three entries is logged." + type: "number" + example: + - 3 + minimum: 1 + maximum: 1000 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. 
So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Random Sampling" + description: + "For each stream, randomly log a percentage of the\ + \ entries with a maximum cap." + type: "object" + required: + - "logging_type" + - "sampling_ratio" + - "max_entry_count" + properties: + logging_type: + type: "string" + enum: + - "RandomSampling" + default: "RandomSampling" + sampling_ratio: + title: "Sampling Ratio" + description: "A positive floating number smaller than 1." + type: "number" + default: 0.001 + examples: + - 0.001 + minimum: 0 + maximum: 1 + seed: + title: "Random Number Generator Seed" + description: + "When the seed is unspecified, the current time\ + \ millis will be used as the seed." + type: "number" + examples: + - 1900 + max_entry_count: + title: "Max Log Entries" + description: + "Max number of entries to log. This destination\ + \ is for testing only. So it won't make sense to log infinitely.\ + \ The maximum is 1,000 entries." + type: "number" + default: 100 + examples: + - 100 + minimum: 1 + maximum: 1000 + - title: "Silent" + required: + - "test_destination_type" + properties: + test_destination_type: + type: "string" + const: "SILENT" + default: "SILENT" + enum: + - "SILENT" + - title: "Throttled" + required: + - "test_destination_type" + - "millis_per_record" + properties: + test_destination_type: + type: "string" + const: "THROTTLED" + default: "THROTTLED" + enum: + - "THROTTLED" + millis_per_record: + description: "Number of milli-second to pause in between records." + type: "integer" + - title: "Failing" + required: + - "test_destination_type" + - "num_messages" + properties: + test_destination_type: + type: "string" + const: "FAILING" + default: "FAILING" + enum: + - "FAILING" + num_messages: + description: "Number of messages after which to fail." 
+ type: "integer" + destination-snowflake-cortex: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Snowflake Connection" + type: "object" + properties: + host: + title: "Host" + description: + "Enter the account name you want to use to access the database.\ + \ This is usually the identifier before .snowflakecomputing.com" + order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + role: + title: "Role" + description: "Enter the role that you want to use to access Snowflake" + order: 2 + examples: + - "AIRBYTE_ROLE" + - "ACCOUNTADMIN" + type: "string" + warehouse: + title: "Warehouse" + description: + "Enter the name of the warehouse that you want to use as\ + \ a compute cluster" + order: 3 + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the 
name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "default_schema" + - "username" + - "credentials" + description: "Snowflake can be used to store vector data and retrieve embeddings." + group: "indexing" + destinationType: + title: "snowflake-cortex" + const: "snowflake-cortex" + enum: + - "snowflake-cortex" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-snowflake-cortex-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. 
You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. 
The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." 
+ type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." + default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." 
+ required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." 
+ default: false + group: "advanced" + type: "boolean" + indexing: + title: "Snowflake Connection" + type: "object" + properties: + host: + title: "Host" + description: + "Enter the account name you want to use to access the database.\ + \ This is usually the identifier before .snowflakecomputing.com" + order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + role: + title: "Role" + description: "Enter the role that you want to use to access Snowflake" + order: 2 + examples: + - "AIRBYTE_ROLE" + - "ACCOUNTADMIN" + type: "string" + warehouse: + title: "Warehouse" + description: + "Enter the name of the warehouse that you want to use as\ + \ a compute cluster" + order: 3 + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + required: + - "password" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "default_schema" + - "username" + - "credentials" + description: "Snowflake can be used to store vector data and retrieve embeddings." 
+ group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-timeplus: + title: "Destination Timeplus" + type: "object" + required: + - "endpoint" + - "apikey" + - "destinationType" + properties: + endpoint: + title: "Endpoint" + description: "Timeplus workspace endpoint" + type: "string" + default: "https://us-west-2.timeplus.cloud/" + examples: + - "https://us-west-2.timeplus.cloud/workspace_id" + order: 0 + apikey: + title: "API key" + description: "Personal API key" + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + destinationType: + title: "timeplus" + const: "timeplus" + enum: + - "timeplus" + order: 0 + type: "string" + destination-timeplus-update: + title: "Destination Timeplus" + type: "object" + required: + - "endpoint" + - "apikey" + properties: + endpoint: + title: "Endpoint" + description: "Timeplus workspace endpoint" + type: "string" + default: "https://us-west-2.timeplus.cloud/" + examples: + - "https://us-west-2.timeplus.cloud/workspace_id" + order: 0 + apikey: + title: "API key" + description: "Personal API key" + type: "string" + airbyte_secret: true + order: 1 + destination-convex: + title: "Destination Convex" + type: "object" + required: + - "deployment_url" + - "access_key" + - "destinationType" + properties: + deployment_url: + type: "string" + description: "URL of the Convex deployment that is the destination" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + description: "API access key used to send data to a Convex deployment." 
+ airbyte_secret: "true" + x-speakeasy-param-sensitive: true + destinationType: + title: "convex" + const: "convex" + enum: + - "convex" + order: 0 + type: "string" + destination-convex-update: + title: "Destination Convex" + type: "object" + required: + - "deployment_url" + - "access_key" + properties: + deployment_url: + type: "string" + description: "URL of the Convex deployment that is the destination" + examples: + - "https://murky-swan-635.convex.cloud" + - "https://cluttered-owl-337.convex.cloud" + access_key: + type: "string" + description: "API access key used to send data to a Convex deployment." + airbyte_secret: "true" + destination-firestore: + title: "Destination Google Firestore" + type: "object" + required: + - "project_id" + - "destinationType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Credentials JSON" + airbyte_secret: true + x-speakeasy-param-sensitive: true + destinationType: + title: "firestore" + const: "firestore" + enum: + - "firestore" + order: 0 + type: "string" + destination-firestore-update: + title: "Destination Google Firestore" + type: "object" + required: + - "project_id" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset." + title: "Project ID" + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." 
+ title: "Credentials JSON" + airbyte_secret: true + destination-redshift: + title: "Redshift Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" + - "destinationType" + properties: + host: + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)" + type: "string" + title: "Host" + group: "connection" + order: 1 + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + title: "Port" + group: "connection" + order: 2 + username: + description: "Username to use to access the database." + type: "string" + title: "Username" + group: "connection" + order: 3 + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + group: "connection" + order: 4 + x-speakeasy-param-sensitive: true + database: + description: "Name of the database." + type: "string" + title: "Database" + group: "connection" + order: 5 + schema: + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. Unless specifically configured, the usual value\ + \ for this field is \"public\"." + type: "string" + examples: + - "public" + default: "public" + group: "connection" + title: "Default Schema" + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + group: "connection" + order: 7 + uploading_method: + title: "Uploading Method" + type: "object" + description: "The way data will be uploaded to Redshift." 
+ group: "connection" + order: 8 + display_type: "radio" + oneOf: + - title: "AWS S3 Staging" + description: + "(recommended) Uploads data to S3 and then uses a\ + \ COPY to insert the data into Redshift. COPY is recommended for production\ + \ workloads for better speed and scalability. See AWS docs for more details." + required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + enum: + - "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket." + examples: + - "airbyte.staging" + order: 0 + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: + "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 2 + access_key_id: + type: "string" + description: + "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." 
+ title: "S3 Access Key Id" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Secret Access Key" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for\ + \ the S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: + "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)." + title: "Destinations V2 Raw Table Schema" + order: 9 + group: "tables" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + group: "tables" + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! 
Risk of unrecoverable data loss)" + order: 12 + group: "tables" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "redshift" + const: "redshift" + enum: + - "redshift" + order: 0 + type: "string" + groups: + - id: "connection" + title: "Connection" + - id: "tables" + title: "Tables" + destination-redshift-update: + title: "Redshift Destination Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + - "password" + - "schema" + properties: + host: + description: + "Host Endpoint of the Redshift Cluster (must include the cluster-id,\ + \ region and end with .redshift.amazonaws.com)" + type: "string" + 
title: "Host" + group: "connection" + order: 1 + port: + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5439 + examples: + - "5439" + title: "Port" + group: "connection" + order: 2 + username: + description: "Username to use to access the database." + type: "string" + title: "Username" + group: "connection" + order: 3 + password: + description: "Password associated with the username." + type: "string" + airbyte_secret: true + title: "Password" + group: "connection" + order: 4 + database: + description: "Name of the database." + type: "string" + title: "Database" + group: "connection" + order: 5 + schema: + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. Unless specifically configured, the usual value\ + \ for this field is \"public\"." + type: "string" + examples: + - "public" + default: "public" + group: "connection" + title: "Default Schema" + order: 6 + jdbc_url_params: + title: "JDBC URL Params" + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + group: "connection" + order: 7 + uploading_method: + title: "Uploading Method" + type: "object" + description: "The way data will be uploaded to Redshift." + group: "connection" + order: 8 + display_type: "radio" + oneOf: + - title: "AWS S3 Staging" + description: + "(recommended) Uploads data to S3 and then uses a\ + \ COPY to insert the data into Redshift. COPY is recommended for production\ + \ workloads for better speed and scalability. See AWS docs for more details." 
+ required: + - "method" + - "s3_bucket_name" + - "s3_bucket_region" + - "access_key_id" + - "secret_access_key" + properties: + method: + type: "string" + const: "S3 Staging" + enum: + - "S3 Staging" + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: "The name of the staging S3 bucket." + examples: + - "airbyte.staging" + order: 0 + s3_bucket_path: + title: "S3 Bucket Path" + type: "string" + description: + "The directory under the S3 bucket where data will be\ + \ written. If not provided, then defaults to the root directory.\ + \ See path's name recommendations for more details." + examples: + - "data_sync/test" + order: 1 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: "The region of the S3 staging bucket." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 2 + access_key_id: + type: "string" + description: + "This ID grants access to the above S3 staging bucket.\ + \ Airbyte requires Read and Write permissions to the given bucket.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." + title: "S3 Access Key Id" + airbyte_secret: true + order: 3 + secret_access_key: + type: "string" + description: + "The corresponding secret to the above access key id.\ + \ See AWS docs on how to generate an access key ID and secret access\ + \ key." 
+ title: "S3 Secret Access Key" + airbyte_secret: true + order: 4 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for\ + \ the S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 5 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: + "Whether to delete the staging files from S3 after completing\ + \ the sync. See docs for details." + default: true + order: 6 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)." + title: "Destinations V2 Raw Table Schema" + order: 9 + group: "tables" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + group: "tables" + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 12 + group: "tables" + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + groups: + - id: "connection" + title: "Connection" + - id: "tables" + title: "Tables" + destination-dynamodb: + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name_prefix" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + - "destinationType" + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name_prefix: + title: "Table name prefix" + type: "string" + description: "The prefix to use when naming DynamoDB tables." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + access_key_id: + type: "string" + description: + "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." + title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + x-speakeasy-param-sensitive: true + destinationType: + title: "dynamodb" + const: "dynamodb" + enum: + - "dynamodb" + order: 0 + type: "string" + destination-dynamodb-update: + title: "DynamoDB Destination Spec" + type: "object" + required: + - "dynamodb_table_name_prefix" + - "dynamodb_region" + - "access_key_id" + - "secret_access_key" + properties: + dynamodb_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "This is your DynamoDB endpoint url.(if you are working with\ + \ AWS DynamoDB, just leave empty)." + examples: + - "http://localhost:9000" + dynamodb_table_name_prefix: + title: "Table name prefix" + type: "string" + description: "The prefix to use when naming DynamoDB tables." + examples: + - "airbyte_sync" + dynamodb_region: + title: "DynamoDB Region" + type: "string" + default: "" + description: "The region of the DynamoDB." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + access_key_id: + type: "string" + description: + "The access key id to access the DynamoDB. Airbyte requires\ + \ Read and Write permissions to the DynamoDB." + title: "DynamoDB Key Id" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + secret_access_key: + type: "string" + description: "The corresponding secret to the access key id." + title: "DynamoDB Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + destination-qdrant: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." 
+ type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. 
You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." 
+ processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. 
When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." + default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." 
+ required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." 
+ default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + url: + title: "Public Endpoint" + description: "Public Endpoint of the Qdrant cluser" + order: 0 + type: "string" + auth_method: + title: "Authentication Method" + description: "Method to authenticate with the Qdrant Instance" + default: "api_key_auth" + type: "object" + order: 1 + oneOf: + - title: "ApiKeyAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "api_key_auth" + const: "api_key_auth" + enum: + - "api_key_auth" + type: "string" + api_key: + title: "API Key" + description: "API Key for the Qdrant instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "api_key" + - title: "NoAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + prefer_grpc: + title: "Prefer gRPC" + description: + "Whether to prefer gRPC over HTTP. Set to true for Qdrant\ + \ cloud clusters" + default: true + type: "boolean" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 2 + type: "string" + distance_metric: + title: "Distance Metric" + description: + "The Distance metric used to measure similarities among\ + \ vectors. This field is only used if the collection defined in the\ + \ does not exist yet and is created automatically by the connector." 
+ default: "cos" + enum: + - "dot" + - "cos" + - "euc" + type: "string" + text_field: + title: "Text Field" + description: "The field in the payload that contains the embedded text" + default: "text" + type: "string" + required: + - "url" + - "collection" + group: "Indexing" + description: "Indexing configuration" + destinationType: + title: "qdrant" + const: "qdrant" + enum: + - "qdrant" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-qdrant-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + url: + title: "Public Endpoint" + description: "Public Endpoint of the Qdrant cluser" + order: 0 + type: "string" + auth_method: + title: "Authentication Method" + description: "Method to authenticate with the Qdrant Instance" + default: "api_key_auth" + type: "object" + order: 1 + oneOf: + - title: "ApiKeyAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "api_key_auth" + const: "api_key_auth" + enum: + - "api_key_auth" + type: "string" + api_key: + title: "API Key" + description: "API Key for the Qdrant instance" + airbyte_secret: true + type: "string" + required: + - "api_key" + - title: "NoAuth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + prefer_grpc: + title: "Prefer gRPC" + description: + "Whether to prefer gRPC over HTTP. 
Set to true for Qdrant\ + \ cloud clusters" + default: true + type: "boolean" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 2 + type: "string" + distance_metric: + title: "Distance Metric" + description: + "The Distance metric used to measure similarities among\ + \ vectors. This field is only used if the collection defined in the\ + \ does not exist yet and is created automatically by the connector." + default: "cos" + enum: + - "dot" + - "cos" + - "euc" + type: "string" + text_field: + title: "Text Field" + description: "The field in the payload that contains the embedded text" + default: "text" + type: "string" + required: + - "url" + - "collection" + group: "Indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-snowflake: + title: "Snowflake Destination Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + - "destinationType" + properties: + host: + description: + "Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)" + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" + type: "string" + title: "Host" + pattern: + "^(http(s)?:\\/\\/)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\\ + .(snowflakecomputing\\.com|localstack\\.cloud))$" + pattern_descriptor: "{account_name}.snowflakecomputing.com or {accountname}.{aws_location}.aws.snowflakecomputing.com" + order: 0 + role: + description: + "Enter the role that you want to use to access Snowflake" + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 1 + warehouse: + description: + "Enter the name of the warehouse that you want to use as a compute 
cluster" + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 2 + database: + description: + "Enter the name of the database you want to sync data into" + examples: + - "AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 3 + schema: + description: + "Enter the name of the default schema" + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Default Schema" + order: 4 + username: + description: "Enter the name of the user you want to use to access the database" + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 5 + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Key Pair Authentication" + type: "object" + order: 0 + required: + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + enum: + - "Key Pair Authentication" + default: "Key Pair Authentication" + order: 0 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + x-speakeasy-param-sensitive: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Username and Password" + type: "object" + required: + - "password" + order: 1 + properties: + auth_type: + type: "string" + const: "Username and Password" + enum: + - "Username and Password" + default: "Username and Password" + order: 0 + password: + description: "Enter the password associated with the username." 
+ type: "string" + airbyte_secret: true + title: "Password" + order: 1 + x-speakeasy-param-sensitive: true + - title: "OAuth2.0" + type: "object" + order: 2 + required: + - "access_token" + - "refresh_token" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + default: "OAuth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "Enter your application's Client ID" + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + type: "string" + title: "Client Secret" + description: "Enter your application's Client secret" + airbyte_secret: true + x-speakeasy-param-sensitive: true + access_token: + type: "string" + title: "Access Token" + description: "Enter you application's Access Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Enter your application's Refresh Token" + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 6 + jdbc_url_params: + description: + "Enter the additional properties to pass to the JDBC URL string\ + \ when connecting to the database (formatted as key=value pairs separated\ + \ by the symbol &). Example: key1=value1&key2=value2&key3=value3" + title: "JDBC URL Params" + type: "string" + order: 7 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 10 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! 
Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + retention_period_days: + type: "integer" + default: 1 + description: + "The number of days of Snowflake Time Travel to enable on the\ + \ tables. See Snowflake's documentation for more information. Setting a nonzero\ + \ value will incur increased storage costs in your Snowflake instance." + title: "Data Retention Period (days)" + order: 13 + use_merge_for_upsert: + type: "boolean" + default: false + description: + "Use MERGE for de-duplication of final tables. This option\ + \ no effect if Final tables are disabled or Sync mode is not DEDUPE" + title: "Use MERGE for De-duplication of final tables" + order: 14 + destinationType: + title: "snowflake" + const: "snowflake" + enum: + - "snowflake" + order: 0 + type: "string" + destination-snowflake-update: + title: "Snowflake Destination Spec" + type: "object" + required: + - "host" + - "role" + - "warehouse" + - "database" + - "schema" + - "username" + properties: + host: + description: + "Enter your Snowflake account's locator (in the format ...snowflakecomputing.com)" + examples: + - "accountname.us-east-2.aws.snowflakecomputing.com" + - "accountname.snowflakecomputing.com" + type: "string" + title: "Host" + pattern: + "^(http(s)?:\\/\\/)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\.)?([^./?#]+\\\ + .(snowflakecomputing\\.com|localstack\\.cloud))$" + pattern_descriptor: "{account_name}.snowflakecomputing.com or {accountname}.{aws_location}.aws.snowflakecomputing.com" + order: 0 + role: + description: + "Enter the role that you want to use to access Snowflake" + examples: + - "AIRBYTE_ROLE" + type: "string" + title: "Role" + order: 1 + warehouse: + description: + "Enter the name of the warehouse that you want to use as a compute cluster" + examples: + - "AIRBYTE_WAREHOUSE" + type: "string" + title: "Warehouse" + order: 2 + database: + description: + "Enter the name of the database you want to sync data into" + examples: + - 
"AIRBYTE_DATABASE" + type: "string" + title: "Database" + order: 3 + schema: + description: + "Enter the name of the default schema" + examples: + - "AIRBYTE_SCHEMA" + type: "string" + title: "Default Schema" + order: 4 + username: + description: "Enter the name of the user you want to use to access the database" + examples: + - "AIRBYTE_USER" + type: "string" + title: "Username" + order: 5 + credentials: + title: "Authorization Method" + description: "" + type: "object" + oneOf: + - title: "Key Pair Authentication" + type: "object" + order: 0 + required: + - "private_key" + properties: + auth_type: + type: "string" + const: "Key Pair Authentication" + enum: + - "Key Pair Authentication" + default: "Key Pair Authentication" + order: 0 + private_key: + type: "string" + title: "Private Key" + description: + "RSA Private key to use for Snowflake connection. See\ + \ the docs for more information on how to obtain this key." + multiline: true + airbyte_secret: true + private_key_password: + type: "string" + title: "Passphrase" + description: "Passphrase for private key" + airbyte_secret: true + - title: "Username and Password" + type: "object" + required: + - "password" + order: 1 + properties: + auth_type: + type: "string" + const: "Username and Password" + enum: + - "Username and Password" + default: "Username and Password" + order: 0 + password: + description: "Enter the password associated with the username." 
+ type: "string" + airbyte_secret: true + title: "Password" + order: 1 + - title: "OAuth2.0" + type: "object" + order: 2 + required: + - "access_token" + - "refresh_token" + airbyte_hidden: true + properties: + auth_type: + type: "string" + const: "OAuth2.0" + enum: + - "OAuth2.0" + default: "OAuth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "Enter your application's Client ID" + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "Enter your application's Client secret" + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Enter you application's Access Token" + airbyte_secret: true + refresh_token: + type: "string" + title: "Refresh Token" + description: "Enter your application's Refresh Token" + airbyte_secret: true + order: 6 + jdbc_url_params: + description: + "Enter the additional properties to pass to the JDBC URL string\ + \ when connecting to the database (formatted as key=value pairs separated\ + \ by the symbol &). Example: key1=value1&key2=value2&key3=value3" + title: "JDBC URL Params" + type: "string" + order: 7 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 10 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 11 + retention_period_days: + type: "integer" + default: 1 + description: + "The number of days of Snowflake Time Travel to enable on the\ + \ tables. See Snowflake's documentation for more information. 
Setting a nonzero\ + \ value will incur increased storage costs in your Snowflake instance." + title: "Data Retention Period (days)" + order: 13 + use_merge_for_upsert: + type: "boolean" + default: false + description: + "Use MERGE for de-duplication of final tables. This option\ + \ no effect if Final tables are disabled or Sync mode is not DEDUPE" + title: "Use MERGE for De-duplication of final tables" + order: 14 + destination-databricks: + title: "Databricks Lakehouse Destination Spec" + type: "object" + required: + - "accept_terms" + - "hostname" + - "http_path" + - "database" + - "authentication" + - "destinationType" + properties: + accept_terms: + title: "Agree to the Databricks JDBC Driver Terms & Conditions" + type: "boolean" + description: + "You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector." + default: false + order: 1 + hostname: + title: "Server Hostname" + type: "string" + description: "Databricks Cluster Server Hostname." + examples: + - "abc-12345678-wxyz.cloud.databricks.com" + order: 2 + http_path: + title: "HTTP Path" + type: "string" + description: "Databricks Cluster HTTP Path." + examples: + - "sql/1.0/warehouses/0000-1111111-abcd90" + order: 3 + port: + title: "Port" + type: "string" + description: "Databricks Cluster Port." + default: "443" + examples: + - "443" + order: 4 + database: + title: "Databricks Unity Catalog Name" + description: "The name of the unity catalog for the database" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema tables are written. If not specified otherwise,\ + \ the \"default\" will be used." 
+ type: "string" + examples: + - "default" + default: "default" + order: 6 + authentication: + title: "Authentication" + type: "object" + description: "Authentication mechanism for Staging files and running queries" + default: "OAUTH" + order: 8 + oneOf: + - title: "OAuth2 (Recommended)" + required: + - "auth_type" + - "client_id" + - "secret" + properties: + auth_type: + type: "string" + const: "OAUTH" + order: 0 + enum: + - "OAUTH" + client_id: + type: "string" + order: 1 + secret: + type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + - title: "Personal Access Token" + required: + - "auth_type" + - "personal_access_token" + properties: + auth_type: + type: "string" + const: "BASIC" + order: 0 + enum: + - "BASIC" + personal_access_token: + type: "string" + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Default to 'true'. Switch it to 'false' for debugging purpose." + default: true + order: 9 + raw_schema_override: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + default: "airbyte_internal" + order: 10 + destinationType: + title: "databricks" + const: "databricks" + enum: + - "databricks" + order: 0 + type: "string" + destination-databricks-update: + title: "Databricks Lakehouse Destination Spec" + type: "object" + required: + - "accept_terms" + - "hostname" + - "http_path" + - "database" + - "authentication" + properties: + accept_terms: + title: "Agree to the Databricks JDBC Driver Terms & Conditions" + type: "boolean" + description: + "You must agree to the Databricks JDBC Driver Terms & Conditions to use this connector." + default: false + order: 1 + hostname: + title: "Server Hostname" + type: "string" + description: "Databricks Cluster Server Hostname." 
+ examples: + - "abc-12345678-wxyz.cloud.databricks.com" + order: 2 + http_path: + title: "HTTP Path" + type: "string" + description: "Databricks Cluster HTTP Path." + examples: + - "sql/1.0/warehouses/0000-1111111-abcd90" + order: 3 + port: + title: "Port" + type: "string" + description: "Databricks Cluster Port." + default: "443" + examples: + - "443" + order: 4 + database: + title: "Databricks Unity Catalog Name" + description: "The name of the unity catalog for the database" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema tables are written. If not specified otherwise,\ + \ the \"default\" will be used." + type: "string" + examples: + - "default" + default: "default" + order: 6 + authentication: + title: "Authentication" + type: "object" + description: "Authentication mechanism for Staging files and running queries" + default: "OAUTH" + order: 8 + oneOf: + - title: "OAuth2 (Recommended)" + required: + - "auth_type" + - "client_id" + - "secret" + properties: + auth_type: + type: "string" + const: "OAUTH" + order: 0 + enum: + - "OAUTH" + client_id: + type: "string" + order: 1 + secret: + type: "string" + airbyte_secret: true + order: 2 + - title: "Personal Access Token" + required: + - "auth_type" + - "personal_access_token" + properties: + auth_type: + type: "string" + const: "BASIC" + order: 0 + enum: + - "BASIC" + personal_access_token: + type: "string" + airbyte_secret: true + order: 1 + purge_staging_data: + title: "Purge Staging Files and Tables" + type: "boolean" + description: "Default to 'true'. Switch it to 'false' for debugging purpose." 
+ default: true + order: 9 + raw_schema_override: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + default: "airbyte_internal" + order: 10 + destination-oracle: + title: "Oracle Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sid" + - "destinationType" + properties: + host: + title: "Host" + description: "The hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + examples: + - "1521" + order: 1 + sid: + title: "SID" + description: + "The System Identifier uniquely distinguishes the instance\ + \ from any other instance on the same computer." + type: "string" + order: 2 + username: + title: "User" + description: + "The username to access the database. This user must have CREATE\ + \ USER privileges in the database." + type: "string" + order: 3 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema is used as the target schema for all statements\ + \ issued from the connection that do not explicitly specify a schema name.\ + \ The usual value for this field is \"airbyte\". In Oracle, schemas and\ + \ users are the same thing, so the \"user\" parameter is used as the login\ + \ credentials and this is used for the default Airbyte message schema." 
+ type: "string" + examples: + - "airbyte" + default: "airbyte" + order: 6 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the database encryption algorithm." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." 
+ type: "string" + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "oracle" + const: "oracle" + enum: + - "oracle" + order: 0 + type: "string" + destination-oracle-update: + title: "Oracle Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "sid" + properties: + host: + title: "Host" + description: "The hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "The port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 1521 + examples: + - "1521" + order: 1 + sid: + title: "SID" + description: + "The System Identifier uniquely distinguishes the instance\ + \ from any other instance on the same computer." + type: "string" + order: 2 + username: + title: "User" + description: + "The username to access the database. This user must have CREATE\ + \ USER privileges in the database." + type: "string" + order: 3 + password: + title: "Password" + description: "The password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 5 + schema: + title: "Default Schema" + description: + "The default schema is used as the target schema for all statements\ + \ issued from the connection that do not explicitly specify a schema name.\ + \ The usual value for this field is \"airbyte\". In Oracle, schemas and\ + \ users are the same thing, so the \"user\" parameter is used as the login\ + \ credentials and this is used for the default Airbyte message schema." + type: "string" + examples: + - "airbyte" + default: "airbyte" + order: 6 + encryption: + title: "Encryption" + type: "object" + description: + "The encryption method which is used when communicating with\ + \ the database." + order: 7 + oneOf: + - title: "Unencrypted" + description: "Data transfer will not be encrypted." 
+ required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "unencrypted" + enum: + - "unencrypted" + default: "unencrypted" + - title: "Native Network Encryption (NNE)" + description: + "The native network encryption gives you the ability to encrypt\ + \ database connections, without the configuration overhead of TCP/IP\ + \ and SSL/TLS and without the need to open and listen on different ports." + required: + - "encryption_method" + properties: + encryption_method: + type: "string" + const: "client_nne" + enum: + - "client_nne" + default: "client_nne" + encryption_algorithm: + type: "string" + description: "This parameter defines the database encryption algorithm." + title: "Encryption Algorithm" + default: "AES256" + enum: + - "AES256" + - "RC4_56" + - "3DES168" + - title: "TLS Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "encryption_method" + - "ssl_certificate" + properties: + encryption_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + default: "encrypted_verify_certificate" + ssl_certificate: + title: "SSL PEM file" + description: + "Privacy Enhanced Mail (PEM) files are concatenated certificate\ + \ containers frequently used in certificate installations." + type: "string" + airbyte_secret: true + multiline: true + raw_data_schema: + type: "string" + description: "The schema to write raw tables into (default: airbyte_internal)" + title: "Raw Table Schema Name" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-aws-datalake: + title: "AWS Datalake Destination Spec" + type: "object" + required: + - "credentials" + - "region" + - "bucket_name" + - "lakeformation_database_name" + - "destinationType" + properties: + aws_account_id: + type: "string" + title: "AWS Account Id" + description: "target aws account id" + examples: + - "111111111111" + order: 1 + credentials: + title: "Authentication mode" + description: "Choose How to Authenticate to AWS." 
+ type: "object" + oneOf: + - type: "object" + title: "IAM Role" + required: + - "role_arn" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM Role" + enum: + - "IAM Role" + default: "IAM Role" + order: 0 + role_arn: + title: "Target Role Arn" + type: "string" + description: "Will assume this role to write data to s3" + airbyte_secret: false + x-speakeasy-param-sensitive: true + - type: "object" + title: "IAM User" + required: + - "credentials_title" + - "aws_access_key_id" + - "aws_secret_access_key" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM User" + enum: + - "IAM User" + default: "IAM User" + order: 0 + aws_access_key_id: + title: "Access Key Id" + type: "string" + description: "AWS User Access Key Id" + airbyte_secret: true + x-speakeasy-param-sensitive: true + aws_secret_access_key: + title: "Secret Access Key" + type: "string" + description: "Secret Access Key" + airbyte_secret: true + x-speakeasy-param-sensitive: true + order: 2 + region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." 
+ enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + order: 4 + bucket_prefix: + title: "Target S3 Bucket Prefix" + type: "string" + description: "S3 prefix" + order: 5 + lakeformation_database_name: + title: "Lake Formation Database Name" + type: "string" + description: + "The default database this destination will use to create tables\ + \ in per stream. Can be changed per connection by customizing the namespace." + order: 6 + lakeformation_database_default_tag_key: + title: "Lake Formation Database Tag Key" + description: "Add a default tag key to databases created by this destination" + examples: + - "pii_level" + type: "string" + order: 7 + lakeformation_database_default_tag_values: + title: "Lake Formation Database Tag Values" + description: + "Add default values for the `Tag Key` to databases created\ + \ by this destination. Comma separate for multiple values." + examples: + - "private,public" + type: "string" + order: 8 + lakeformation_governed_tables: + title: "Lake Formation Governed Tables" + description: "Whether to create tables as LF governed tables." + type: "boolean" + default: false + order: 9 + format: + title: "Output Format *" + type: "object" + description: "Format of the data output." 
+ oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "GZIP" + default: "UNCOMPRESSED" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "ZSTD" + default: "SNAPPY" + order: 10 + partitioning: + title: "Choose how to partition data" + description: "Partition data by cursor fields when a cursor field is a date" + type: "string" + enum: + - "NO PARTITIONING" + - "DATE" + - "YEAR" + - "MONTH" + - "DAY" + - "YEAR/MONTH" + - "YEAR/MONTH/DAY" + default: "NO PARTITIONING" + order: 11 + glue_catalog_float_as_decimal: + title: "Glue Catalog: Float as Decimal" + description: + "Cast float/double as decimal(38,18). This can help achieve\ + \ higher accuracy and represent numbers correctly as received from the\ + \ source." + type: "boolean" + default: false + order: 12 + destinationType: + title: "aws-datalake" + const: "aws-datalake" + enum: + - "aws-datalake" + order: 0 + type: "string" + destination-aws-datalake-update: + title: "AWS Datalake Destination Spec" + type: "object" + required: + - "credentials" + - "region" + - "bucket_name" + - "lakeformation_database_name" + properties: + aws_account_id: + type: "string" + title: "AWS Account Id" + description: "target aws account id" + examples: + - "111111111111" + order: 1 + credentials: + title: "Authentication mode" + description: "Choose How to Authenticate to AWS." 
+ type: "object" + oneOf: + - type: "object" + title: "IAM Role" + required: + - "role_arn" + - "credentials_title" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM Role" + enum: + - "IAM Role" + default: "IAM Role" + order: 0 + role_arn: + title: "Target Role Arn" + type: "string" + description: "Will assume this role to write data to s3" + airbyte_secret: false + - type: "object" + title: "IAM User" + required: + - "credentials_title" + - "aws_access_key_id" + - "aws_secret_access_key" + properties: + credentials_title: + type: "string" + title: "Credentials Title" + description: "Name of the credentials" + const: "IAM User" + enum: + - "IAM User" + default: "IAM User" + order: 0 + aws_access_key_id: + title: "Access Key Id" + type: "string" + description: "AWS User Access Key Id" + airbyte_secret: true + aws_secret_access_key: + title: "Secret Access Key" + type: "string" + description: "Secret Access Key" + airbyte_secret: true + order: 2 + region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." 
+ order: 4 + bucket_prefix: + title: "Target S3 Bucket Prefix" + type: "string" + description: "S3 prefix" + order: 5 + lakeformation_database_name: + title: "Lake Formation Database Name" + type: "string" + description: + "The default database this destination will use to create tables\ + \ in per stream. Can be changed per connection by customizing the namespace." + order: 6 + lakeformation_database_default_tag_key: + title: "Lake Formation Database Tag Key" + description: "Add a default tag key to databases created by this destination" + examples: + - "pii_level" + type: "string" + order: 7 + lakeformation_database_default_tag_values: + title: "Lake Formation Database Tag Values" + description: + "Add default values for the `Tag Key` to databases created\ + \ by this destination. Comma separate for multiple values." + examples: + - "private,public" + type: "string" + order: 8 + lakeformation_governed_tables: + title: "Lake Formation Governed Tables" + description: "Whether to create tables as LF governed tables." + type: "boolean" + default: false + order: 9 + format: + title: "Output Format *" + type: "object" + description: "Format of the data output." + oneOf: + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "JSONL" + default: "JSONL" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." + type: "string" + enum: + - "UNCOMPRESSED" + - "GZIP" + default: "UNCOMPRESSED" + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type *" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec (Optional)" + description: "The compression algorithm used to compress data." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "ZSTD" + default: "SNAPPY" + order: 10 + partitioning: + title: "Choose how to partition data" + description: "Partition data by cursor fields when a cursor field is a date" + type: "string" + enum: + - "NO PARTITIONING" + - "DATE" + - "YEAR" + - "MONTH" + - "DAY" + - "YEAR/MONTH" + - "YEAR/MONTH/DAY" + default: "NO PARTITIONING" + order: 11 + glue_catalog_float_as_decimal: + title: "Glue Catalog: Float as Decimal" + description: + "Cast float/double as decimal(38,18). This can help achieve\ + \ higher accuracy and represent numbers correctly as received from the\ + \ source." + type: "boolean" + default: false + order: 12 + destination-milvus: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: 
"vector" + type: "string" + text_field: + title: "Text Field" + description: "The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "milvus" + const: "milvus" + enum: + - "milvus" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-milvus-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + required: + - "token" + - "mode" + description: + "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + - "mode" + description: + "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: + "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + required: + - "mode" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: "vector" + type: "string" + text_field: + title: "Text Field" + description: 
"The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-firebolt: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + - "destinationType" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + order: 0 + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + order: 1 + x-speakeasy-param-sensitive: true + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." + examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." 
+ loading_method: + type: "object" + title: "Loading Method" + description: + "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + enum: + - "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + enum: + - "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." + examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + x-speakeasy-param-sensitive: true + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + x-speakeasy-param-sensitive: true + destinationType: + title: "firebolt" + const: "firebolt" + enum: + - "firebolt" + order: 0 + type: "string" + destination-firebolt-update: + title: "Firebolt Spec" + type: "object" + required: + - "client_id" + - "client_secret" + - "account" + - "database" + - "engine" + properties: + client_id: + type: "string" + title: "Client ID" + description: "Firebolt service account ID." + examples: + - "bbl9qth066hmxkwyb0hy2iwk8ktez9dz" + order: 0 + client_secret: + type: "string" + title: "Client Secret" + description: "Firebolt secret, corresponding to the service account ID." + airbyte_secret: true + order: 1 + account: + type: "string" + title: "Account" + description: "Firebolt account to login." + host: + type: "string" + title: "Host" + description: "The host name of your Firebolt database." 
+ examples: + - "api.app.firebolt.io" + database: + type: "string" + title: "Database" + description: "The database to connect to." + engine: + type: "string" + title: "Engine" + description: "Engine name to connect to." + loading_method: + type: "object" + title: "Loading Method" + description: + "Loading method used to select the way data will be uploaded\ + \ to Firebolt" + oneOf: + - title: "SQL Inserts" + additionalProperties: false + required: + - "method" + properties: + method: + type: "string" + const: "SQL" + enum: + - "SQL" + - title: "External Table via S3" + additionalProperties: false + required: + - "method" + - "s3_bucket" + - "s3_region" + - "aws_key_id" + - "aws_key_secret" + properties: + method: + type: "string" + const: "S3" + enum: + - "S3" + s3_bucket: + type: "string" + title: "S3 bucket name" + description: "The name of the S3 bucket." + s3_region: + type: "string" + title: "S3 region name" + description: "Region name of the S3 bucket." + examples: + - "us-east-1" + aws_key_id: + type: "string" + title: "AWS Key ID" + airbyte_secret: true + description: "AWS access key granting read and write access to S3." + aws_key_secret: + type: "string" + title: "AWS Key Secret" + airbyte_secret: true + description: "Corresponding secret part of the AWS Key" + destination-google-sheets: + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + - "destinationType" + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "The link to your spreadsheet. See this\ + \ guide for more details." 
+ examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "Authentication via Google (OAuth)" + description: + "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." + airbyte_secret: true + x-speakeasy-param-sensitive: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + x-speakeasy-param-sensitive: true + destinationType: + title: "google-sheets" + const: "google-sheets" + enum: + - "google-sheets" + order: 0 + type: "string" + destination-google-sheets-update: + title: "Destination Google Sheets" + type: "object" + required: + - "spreadsheet_id" + - "credentials" + properties: + spreadsheet_id: + type: "string" + title: "Spreadsheet Link" + description: + "The link to your spreadsheet. See this\ + \ guide for more details." + examples: + - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" + credentials: + type: "object" + title: "Authentication via Google (OAuth)" + description: + "Google API Credentials for connecting to Google Sheets and\ + \ Google Drive APIs" + required: + - "client_id" + - "client_secret" + - "refresh_token" + properties: + client_id: + title: "Client ID" + type: "string" + description: "The Client ID of your Google Sheets developer application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "The Client Secret of your Google Sheets developer application." 
+ airbyte_secret: true + refresh_token: + title: "Refresh Token" + type: "string" + description: "The token for obtaining new access token." + airbyte_secret: true + destination-astra: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + astra_db_app_token: + title: "Astra DB Application Token" + description: + "The application token authorizes a user to connect to\ + \ a specific Astra DB database. It is created when the user clicks\ + \ the Generate Token button on the Overview tab of the Database page\ + \ in the Astra UI." + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + astra_db_endpoint: + title: "Astra DB Endpoint" + description: + "The endpoint specifies which Astra DB database queries\ + \ are sent to. It can be copied from the Database Details section\ + \ of the Overview tab of the Database page in the Astra UI." 
+ pattern: + "^https:\\/\\/([a-z]|[0-9]){8}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){12}-[^\\\ + .]*?\\.apps\\.astra\\.datastax\\.com" + examples: + - "https://8292d414-dd1b-4c33-8431-e838bedc04f7-us-east1.apps.astra.datastax.com" + type: "string" + astra_db_keyspace: + title: "Astra DB Keyspace" + description: + "Keyspaces (or Namespaces) serve as containers for organizing\ + \ data within a database. You can create a new keyspace uisng the\ + \ Data Explorer tab in the Astra UI. The keyspace default_keyspace\ + \ is created for you when you create a Vector Database in Astra DB." + type: "string" + collection: + title: "Astra DB collection" + description: + "Collections hold data. They are analagous to tables in\ + \ traditional Cassandra terminology. This tool will create the collection\ + \ with the provided name automatically if it does not already exist.\ + \ Alternatively, you can create one thorugh the Data Explorer tab\ + \ in the Astra UI." + type: "string" + required: + - "astra_db_app_token" + - "astra_db_endpoint" + - "astra_db_keyspace" + - "collection" + description: + "Astra DB gives developers the APIs, real-time data and ecosystem\ + \ integrations to put accurate RAG and Gen AI apps with fewer hallucinations\ + \ in production." 
+ group: "indexing" + destinationType: + title: "astra" + const: "astra" + enum: + - "astra" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-astra-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + astra_db_app_token: + title: "Astra DB Application Token" + description: + "The application token authorizes a user to connect to\ + \ a specific Astra DB database. It is created when the user clicks\ + \ the Generate Token button on the Overview tab of the Database page\ + \ in the Astra UI." + airbyte_secret: true + type: "string" + astra_db_endpoint: + title: "Astra DB Endpoint" + description: + "The endpoint specifies which Astra DB database queries\ + \ are sent to. It can be copied from the Database Details section\ + \ of the Overview tab of the Database page in the Astra UI." 
+ pattern:
+ "^https:\\/\\/([a-z]|[0-9]){8}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){4}-([a-z]|[0-9]){12}-[^\\\
+ .]*?\\.apps\\.astra\\.datastax\\.com"
+ examples:
+ - "https://8292d414-dd1b-4c33-8431-e838bedc04f7-us-east1.apps.astra.datastax.com"
+ type: "string"
+ astra_db_keyspace:
+ title: "Astra DB Keyspace"
+ description:
+ "Keyspaces (or Namespaces) serve as containers for organizing\
+ \ data within a database. You can create a new keyspace using the\
+ \ Data Explorer tab in the Astra UI. The keyspace default_keyspace\
+ \ is created for you when you create a Vector Database in Astra DB."
+ type: "string"
+ collection:
+ title: "Astra DB collection"
+ description:
+ "Collections hold data. They are analogous to tables in\
+ \ traditional Cassandra terminology. This tool will create the collection\
+ \ with the provided name automatically if it does not already exist.\
+ \ Alternatively, you can create one through the Data Explorer tab\
+ \ in the Astra UI."
+ type: "string"
+ required:
+ - "astra_db_app_token"
+ - "astra_db_endpoint"
+ - "astra_db_keyspace"
+ - "collection"
+ description:
+ "Astra DB gives developers the APIs, real-time data and ecosystem\
+ \ integrations to put accurate RAG and Gen AI apps with fewer hallucinations\
+ \ in production."
+ group: "indexing"
+ required:
+ - "embedding"
+ - "processing"
+ - "indexing"
+ groups:
+ - id: "processing"
+ title: "Processing"
+ - id: "embedding"
+ title: "Embedding"
+ - id: "indexing"
+ title: "Indexing"
+ - id: "advanced"
+ title: "Advanced"
+ destination-teradata:
+ title: "Teradata Destination Spec"
+ type: "object"
+ required:
+ - "host"
+ - "username"
+ - "destinationType"
+ properties:
+ host:
+ title: "Host"
+ description: "Hostname of the database."
+ type: "string"
+ order: 0
+ username:
+ title: "User"
+ description: "Username to use to access the database."
+ type: "string"
+ order: 1
+ password:
+ title: "Password"
+ description: "Password associated with the username."
+ type: "string" + airbyte_secret: true + order: 2 + x-speakeasy-param-sensitive: true + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "airbyte_td" + default: "airbyte_td" + order: 3 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 5 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the destination database\n prefer - Chose this\ + \ mode to allow unencrypted connection only if the destination database\ + \ does not support encryption\n require - Chose this mode to always\ + \ require encryption. If the destination database server does not support\ + \ encryption, connection will fail\n verify-ca - Chose this mode\ + \ to always require encryption and to verify that the destination database\ + \ server has a valid SSL certificate\n verify-full - This is the\ + \ most secure mode. Chose this mode to always require encryption and to\ + \ verify the identity of the destination database server\n See more information\ + \ - in the docs." + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-ca.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-full.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. 
(example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 7 + destinationType: + title: "teradata" + const: "teradata" + enum: + - "teradata" + order: 0 + type: "string" + destination-teradata-update: + title: "Teradata Destination Spec" + type: "object" + required: + - "host" + - "username" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 1 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "airbyte_td" + default: "airbyte_td" + order: 3 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 5 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the destination database\n prefer - Chose this\ + \ mode to allow unencrypted connection only if the destination database\ + \ does not support encryption\n require - Chose this mode to always\ + \ require encryption. If the destination database server does not support\ + \ encryption, connection will fail\n verify-ca - Chose this mode\ + \ to always require encryption and to verify that the destination database\ + \ server has a valid SSL certificate\n verify-full - This is the\ + \ most secure mode. 
Chose this mode to always require encryption and to\ + \ verify the identity of the destination database server\n See more information\ + \ - in the docs." + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-ca.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ssl_ca_certificate" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ssl_ca_certificate: + type: "string" + title: "CA certificate" + description: + "Specifies the file name of a PEM file that contains\ + \ Certificate Authority (CA) certificates for use with SSLMODE=verify-full.\n\ + \ See more information - in the docs." + airbyte_secret: true + multiline: true + order: 1 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 7 + destination-pinecone: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. 
This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + description: + "The Pinecone API key to use matching the environment (copy\ + \ from Pinecone console)" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + pinecone_environment: + title: "Pinecone Environment" + description: "Pinecone Cloud environment to use" + examples: + - "us-west1-gcp" + - "gcp-starter" + type: "string" + index: + title: "Index" + description: "Pinecone index in your project to load data into" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: + "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + destinationType: + title: "pinecone" + const: "pinecone" + enum: + - "pinecone" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-pinecone-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + description: + "The Pinecone API key to use matching the environment (copy\ + \ from Pinecone console)" + airbyte_secret: true + type: "string" + pinecone_environment: + title: "Pinecone Environment" + description: "Pinecone Cloud environment to use" + examples: + - "us-west1-gcp" + - "gcp-starter" + type: "string" + index: + title: "Index" + description: "Pinecone index in your project to load data into" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: + "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-duckdb: + title: "Destination Duckdb" + type: "object" + required: + - "destination_path" + - "destinationType" + properties: + motherduck_api_key: + title: "MotherDuck API Key" + type: "string" + description: "API key to use for authentication to a MotherDuck database." + airbyte_secret: true + x-speakeasy-param-sensitive: true + destination_path: + title: "Destination DB" + type: "string" + description: + "Path to the .duckdb file, or the text 'md:' to connect to\ + \ MotherDuck. The file will be placed inside that local mount. For more\ + \ information check out our docs" + examples: + - "/local/destination.duckdb" + - "md:" + - "motherduck:" + schema: + title: "Destination Schema" + type: "string" + description: "Database schema name, default for duckdb is 'main'." + example: "main" + destinationType: + title: "duckdb" + const: "duckdb" + enum: + - "duckdb" + order: 0 + type: "string" + destination-duckdb-update: + title: "Destination Duckdb" + type: "object" + required: + - "destination_path" + properties: + motherduck_api_key: + title: "MotherDuck API Key" + type: "string" + description: "API key to use for authentication to a MotherDuck database." + airbyte_secret: true + destination_path: + title: "Destination DB" + type: "string" + description: + "Path to the .duckdb file, or the text 'md:' to connect to\ + \ MotherDuck. The file will be placed inside that local mount. For more\ + \ information check out our docs" + examples: + - "/local/destination.duckdb" + - "md:" + - "motherduck:" + schema: + title: "Destination Schema" + type: "string" + description: "Database schema name, default for duckdb is 'main'." 
+ example: "main" + destination-iceberg: + title: "Iceberg Destination Spec" + type: "object" + required: + - "catalog_config" + - "storage_config" + - "format_config" + - "destinationType" + properties: + catalog_config: + title: "Iceberg catalog config" + type: "object" + description: "Catalog config of Iceberg." + oneOf: + - title: "HiveCatalog: Use Apache Hive MetaStore" + required: + - "catalog_type" + - "hive_thrift_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hive" + enum: + - "Hive" + order: 0 + hive_thrift_uri: + title: "Hive Metastore thrift uri" + type: "string" + description: "Hive MetaStore thrift server uri of iceberg catalog." + examples: + - "host:port" + order: 1 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 2 + - title: + "HadoopCatalog: Use hierarchical file systems as same as storage\ + \ config" + description: + "A Hadoop catalog doesn’t need to connect to a Hive MetaStore,\ + \ but can only be used with HDFS or similar file systems that support\ + \ atomic rename." + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hadoop" + enum: + - "Hadoop" + order: 0 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 1 + - title: "JdbcCatalog: Use relational database" + description: + "Using a table in a relational database to manage Iceberg\ + \ tables through JDBC. Read more here. 
Supporting: PostgreSQL" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Jdbc" + enum: + - "Jdbc" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + jdbc_url: + title: "Jdbc url" + type: "string" + examples: + - "jdbc:postgresql://{host}:{port}/{database}" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please\ + \ select one of the connection modes." + type: "boolean" + default: false + order: 5 + catalog_schema: + title: "schema for Iceberg catalog" + description: + "Iceberg catalog metadata tables are written to catalog\ + \ schema. The usual value for this field is \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 6 + - title: "RESTCatalog" + description: + "The RESTCatalog connects to a REST server at the specified\ + \ URI" + required: + - "catalog_type" + - "rest_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Rest" + enum: + - "Rest" + order: 0 + rest_uri: + title: "REST Server URI" + type: "string" + examples: + - "http://localhost:12345" + order: 1 + rest_credential: + title: + "A credential to exchange for a token in the OAuth2 client\ + \ credentials flow." 
+ type: "string" + airbyte_secret: true + examples: + - "username:password" + order: 2 + x-speakeasy-param-sensitive: true + rest_token: + title: + "A Bearer token which will be used for interaction with the\ + \ server." + type: "string" + airbyte_secret: true + examples: + - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + order: 3 + x-speakeasy-param-sensitive: true + - title: "GlueCatalog" + description: "The GlueCatalog connects to a AWS Glue Catalog" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Glue" + enum: + - "Glue" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + order: 0 + storage_config: + title: "Storage config" + type: "object" + description: "Storage config of Iceberg." + oneOf: + - title: "S3" + type: "object" + description: "S3 object storage" + required: + - "storage_type" + - "access_key_id" + - "secret_access_key" + - "s3_warehouse_uri" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "S3" + enum: + - "S3" + order: 0 + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read\ + \ more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + s3_warehouse_uri: + title: "S3 Warehouse Uri for Iceberg" + type: "string" + description: "The Warehouse Uri for Iceberg" + examples: + - "s3a://my-bucket/path/to/warehouse" + - "s3://my-bucket/path/to/warehouse" + order: 2 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + - "localhost:9000" + order: 4 + s3_path_style_access: + type: "boolean" + description: "Use path style access" + examples: + - true + - false + default: true + order: 5 + - title: "Server-managed" + type: "object" + description: "Server-managed object storage" + required: + - "storage_type" + - "managed_warehouse_name" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "MANAGED" + enum: + - "MANAGED" + order: 0 + managed_warehouse_name: + type: "string" + description: "The name of the managed warehouse" + title: "Warehouse name" + order: 0 + order: 1 + format_config: + title: "File format" + type: "object" + required: + - "format" + description: "File format of Iceberg storage." + properties: + format: + title: "File storage format" + type: "string" + default: "Parquet" + description: "" + enum: + - "Parquet" + - "Avro" + order: 0 + flush_batch_size: + title: "Data file flushing batch size" + description: + "Iceberg data file flush batch size. Incoming rows write\ + \ to cache firstly; When cache size reaches this 'batch size', flush\ + \ into real Iceberg data file." + type: "integer" + default: 10000 + order: 1 + auto_compact: + title: "Auto compact data files" + description: "Auto compact data files when stream close" + type: "boolean" + default: false + order: 2 + compact_target_file_size_in_mb: + title: "Target size of compacted data file" + description: + "Specify the target size of Iceberg data file when performing\ + \ a compaction action. 
" + type: "integer" + default: 100 + order: 3 + order: 2 + destinationType: + title: "iceberg" + const: "iceberg" + enum: + - "iceberg" + order: 0 + type: "string" + destination-iceberg-update: + title: "Iceberg Destination Spec" + type: "object" + required: + - "catalog_config" + - "storage_config" + - "format_config" + properties: + catalog_config: + title: "Iceberg catalog config" + type: "object" + description: "Catalog config of Iceberg." + oneOf: + - title: "HiveCatalog: Use Apache Hive MetaStore" + required: + - "catalog_type" + - "hive_thrift_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hive" + enum: + - "Hive" + order: 0 + hive_thrift_uri: + title: "Hive Metastore thrift uri" + type: "string" + description: "Hive MetaStore thrift server uri of iceberg catalog." + examples: + - "host:port" + order: 1 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 2 + - title: + "HadoopCatalog: Use hierarchical file systems as same as storage\ + \ config" + description: + "A Hadoop catalog doesn’t need to connect to a Hive MetaStore,\ + \ but can only be used with HDFS or similar file systems that support\ + \ atomic rename." + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Hadoop" + enum: + - "Hadoop" + order: 0 + database: + title: "Default database" + description: + "The default database tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"default\"." + type: "string" + default: "default" + examples: + - "default" + order: 1 + - title: "JdbcCatalog: Use relational database" + description: + "Using a table in a relational database to manage Iceberg\ + \ tables through JDBC. 
Read more here. Supporting: PostgreSQL" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Jdbc" + enum: + - "Jdbc" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + jdbc_url: + title: "Jdbc url" + type: "string" + examples: + - "jdbc:postgresql://{host}:{port}/{database}" + order: 2 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please\ + \ select one of the connection modes." + type: "boolean" + default: false + order: 5 + catalog_schema: + title: "schema for Iceberg catalog" + description: + "Iceberg catalog metadata tables are written to catalog\ + \ schema. The usual value for this field is \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 6 + - title: "RESTCatalog" + description: + "The RESTCatalog connects to a REST server at the specified\ + \ URI" + required: + - "catalog_type" + - "rest_uri" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Rest" + enum: + - "Rest" + order: 0 + rest_uri: + title: "REST Server URI" + type: "string" + examples: + - "http://localhost:12345" + order: 1 + rest_credential: + title: + "A credential to exchange for a token in the OAuth2 client\ + \ credentials flow." + type: "string" + airbyte_secret: true + examples: + - "username:password" + order: 2 + rest_token: + title: + "A Bearer token which will be used for interaction with the\ + \ server." 
+ type: "string" + airbyte_secret: true + examples: + - "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiaWF0IjoxNTE2MjM5MDIyfQ.SflKxwRJSMeKKF2QT4fwpMeJf36POk6yJV_adQssw5c" + order: 3 + - title: "GlueCatalog" + description: "The GlueCatalog connects to a AWS Glue Catalog" + required: + - "catalog_type" + properties: + catalog_type: + title: "Catalog Type" + type: "string" + default: "Glue" + enum: + - "Glue" + order: 0 + database: + title: "Default schema" + description: + "The default schema tables are written to if the source\ + \ does not specify a namespace. The usual value for this field is\ + \ \"public\"." + type: "string" + default: "public" + examples: + - "public" + order: 1 + order: 0 + storage_config: + title: "Storage config" + type: "object" + description: "Storage config of Iceberg." + oneOf: + - title: "S3" + type: "object" + description: "S3 object storage" + required: + - "storage_type" + - "access_key_id" + - "secret_access_key" + - "s3_warehouse_uri" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "S3" + enum: + - "S3" + order: 0 + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read\ + \ more here" + title: "S3 Access Key" + airbyte_secret: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + s3_warehouse_uri: + title: "S3 Warehouse Uri for Iceberg" + type: "string" + description: "The Warehouse Uri for Iceberg" + examples: + - "s3a://my-bucket/path/to/warehouse" + - "s3://my-bucket/path/to/warehouse" + order: 2 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 3 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. 
Read more here" + examples: + - "http://localhost:9000" + - "localhost:9000" + order: 4 + s3_path_style_access: + type: "boolean" + description: "Use path style access" + examples: + - true + - false + default: true + order: 5 + - title: "Server-managed" + type: "object" + description: "Server-managed object storage" + required: + - "storage_type" + - "managed_warehouse_name" + properties: + storage_type: + title: "Storage Type" + type: "string" + default: "MANAGED" + enum: + - "MANAGED" + order: 0 + managed_warehouse_name: + type: "string" + description: "The name of the managed warehouse" + title: "Warehouse name" + order: 0 + order: 1 + format_config: + title: "File format" + type: "object" + required: + - "format" + description: "File format of Iceberg storage." + properties: + format: + title: "File storage format" + type: "string" + default: "Parquet" + description: "" + enum: + - "Parquet" + - "Avro" + order: 0 + flush_batch_size: + title: "Data file flushing batch size" + description: + "Iceberg data file flush batch size. Incoming rows write\ + \ to cache firstly; When cache size reaches this 'batch size', flush\ + \ into real Iceberg data file." + type: "integer" + default: 10000 + order: 1 + auto_compact: + title: "Auto compact data files" + description: "Auto compact data files when stream close" + type: "boolean" + default: false + order: 2 + compact_target_file_size_in_mb: + title: "Target size of compacted data file" + description: + "Specify the target size of Iceberg data file when performing\ + \ a compaction action. " + type: "integer" + default: 100 + order: 3 + order: 2 + destination-sftp-json: + title: "Destination SFTP JSON" + type: "object" + required: + - "host" + - "username" + - "password" + - "destination_path" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the SFTP server." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the SFTP server." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - 22 + order: 1 + username: + title: "User" + description: "Username to use to access the SFTP server." + type: "string" + order: 2 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 3 + x-speakeasy-param-sensitive: true + destination_path: + title: "Destination path" + type: "string" + description: "Path to the directory where json files will be written." + examples: + - "/json_data" + order: 4 + destinationType: + title: "sftp-json" + const: "sftp-json" + enum: + - "sftp-json" + order: 0 + type: "string" + destination-sftp-json-update: + title: "Destination SFTP JSON" + type: "object" + required: + - "host" + - "username" + - "password" + - "destination_path" + properties: + host: + title: "Host" + description: "Hostname of the SFTP server." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the SFTP server." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - 22 + order: 1 + username: + title: "User" + description: "Username to use to access the SFTP server." + type: "string" + order: 2 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 3 + destination_path: + title: "Destination path" + type: "string" + description: "Path to the directory where json files will be written." + examples: + - "/json_data" + order: 4 + destination-s3: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + - "destinationType" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." 
+ title: "S3 Key ID" + airbyte_secret: true + always_show: true + examples: + - "A012345678910EXAMPLE" + order: 0 + x-speakeasy-param-sensitive: true + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. Read more here" + title: "S3 Access Key" + airbyte_secret: true + always_show: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + x-speakeasy-param-sensitive: true + role_arn: + type: "string" + description: "The Role ARN" + title: "Role ARN" + examples: + - "arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId" + order: 2 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 3 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 4 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 5 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. 
See here for more details" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." + oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." 
+ oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Avro" + default: "Avro" + order: 0 + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." + type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate Level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." 
+ type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + order: 1 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." + type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. 
Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + order: 6 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 7 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 8 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 9 + destinationType: + title: "s3" + const: "s3" + enum: + - "s3" + order: 0 + type: "string" + destination-s3-update: + title: "S3 Destination Spec" + type: "object" + required: + - "s3_bucket_name" + - "s3_bucket_path" + - "s3_bucket_region" + - "format" + properties: + access_key_id: + type: "string" + description: + "The access key ID to access the S3 bucket. Airbyte requires\ + \ Read and Write permissions to the given bucket. Read more here." + title: "S3 Key ID" + airbyte_secret: true + always_show: true + examples: + - "A012345678910EXAMPLE" + order: 0 + secret_access_key: + type: "string" + description: + "The corresponding secret to the access key ID. 
Read more here" + title: "S3 Access Key" + airbyte_secret: true + always_show: true + examples: + - "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" + order: 1 + role_arn: + type: "string" + description: "The Role ARN" + title: "Role ARN" + examples: + - "arn:aws:iam::123456789:role/ExternalIdIsYourWorkspaceId" + order: 2 + s3_bucket_name: + title: "S3 Bucket Name" + type: "string" + description: + "The name of the S3 bucket. Read more here." + examples: + - "airbyte_sync" + order: 3 + s3_bucket_path: + title: "S3 Bucket Path" + description: + "Directory under the S3 bucket where data will be written.\ + \ Read more here" + type: "string" + examples: + - "data_sync/test" + order: 4 + s3_bucket_region: + title: "S3 Bucket Region" + type: "string" + default: "" + description: + "The region of the S3 bucket. See here for all region codes." + enum: + - "" + - "af-south-1" + - "ap-east-1" + - "ap-northeast-1" + - "ap-northeast-2" + - "ap-northeast-3" + - "ap-south-1" + - "ap-south-2" + - "ap-southeast-1" + - "ap-southeast-2" + - "ap-southeast-3" + - "ap-southeast-4" + - "ca-central-1" + - "ca-west-1" + - "cn-north-1" + - "cn-northwest-1" + - "eu-central-1" + - "eu-central-2" + - "eu-north-1" + - "eu-south-1" + - "eu-south-2" + - "eu-west-1" + - "eu-west-2" + - "eu-west-3" + - "il-central-1" + - "me-central-1" + - "me-south-1" + - "sa-east-1" + - "us-east-1" + - "us-east-2" + - "us-gov-east-1" + - "us-gov-west-1" + - "us-west-1" + - "us-west-2" + order: 5 + format: + title: "Output Format" + type: "object" + description: + "Format of the data output. See here for more details" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "CSV" + default: "CSV" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." 
+ default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".csv.gz\")." + oneOf: + - title: "No Compression" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: + - "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "JSON Lines: Newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "JSONL" + default: "JSONL" + flattening: + type: "string" + title: "Flattening" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output JSON Lines. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + compression: + title: "Compression" + type: "object" + description: + "Whether the output files should be compressed. If compression\ + \ is selected, the output filename will have an extra extension\ + \ (GZIP: \".jsonl.gz\")." + oneOf: + - title: "No Compression" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "No Compression" + default: "No Compression" + - title: "GZIP" + requires: "compression_type" + properties: + compression_type: + type: "string" + enum: + - "GZIP" + default: "GZIP" + - title: "Avro: Apache Avro" + required: + - "format_type" + - "compression_codec" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Avro" + default: "Avro" + order: 0 + compression_codec: + title: "Compression Codec" + description: + "The compression algorithm used to compress data. Default\ + \ to no compression." 
+ type: "object" + oneOf: + - title: "No Compression" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "no compression" + default: "no compression" + - title: "Deflate" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "Deflate" + default: "Deflate" + compression_level: + title: "Deflate Level" + description: + "0: no compression & fastest, 9: best compression\ + \ & slowest." + type: "integer" + default: 0 + minimum: 0 + maximum: 9 + - title: "bzip2" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "bzip2" + default: "bzip2" + - title: "xz" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "xz" + default: "xz" + compression_level: + title: "Compression Level" + description: + "See here for details." + type: "integer" + default: 6 + minimum: 0 + maximum: 9 + - title: "zstandard" + required: + - "codec" + - "compression_level" + properties: + codec: + type: "string" + enum: + - "zstandard" + default: "zstandard" + compression_level: + title: "Compression Level" + description: + "Negative levels are 'fast' modes akin to lz4 or\ + \ snappy, levels above 9 are generally for archival purposes,\ + \ and levels above 18 use a lot of memory." + type: "integer" + default: 3 + minimum: -5 + maximum: 22 + include_checksum: + title: "Include Checksum" + description: "If true, include a checksum with each data block." + type: "boolean" + default: false + - title: "snappy" + required: + - "codec" + properties: + codec: + type: "string" + enum: + - "snappy" + default: "snappy" + order: 1 + - title: "Parquet: Columnar Storage" + required: + - "format_type" + properties: + format_type: + title: "Format Type" + type: "string" + enum: + - "Parquet" + default: "Parquet" + compression_codec: + title: "Compression Codec" + description: "The compression algorithm used to compress data pages." 
+ type: "string" + enum: + - "UNCOMPRESSED" + - "SNAPPY" + - "GZIP" + - "LZO" + - "BROTLI" + - "LZ4" + - "ZSTD" + default: "UNCOMPRESSED" + block_size_mb: + title: "Block Size (Row Group Size) (MB)" + description: + "This is the size of a row group being buffered in memory.\ + \ It limits the memory usage when writing. Larger values will improve\ + \ the IO when reading, but consume more memory when writing. Default:\ + \ 128 MB." + type: "integer" + default: 128 + examples: + - 128 + max_padding_size_mb: + title: "Max Padding Size (MB)" + description: + "Maximum size allowed as padding to align row groups.\ + \ This is also the minimum size of a row group. Default: 8 MB." + type: "integer" + default: 8 + examples: + - 8 + page_size_kb: + title: "Page Size (KB)" + description: + "The page size is for compression. A block is composed\ + \ of pages. A page is the smallest unit that must be read fully\ + \ to access a single record. If this value is too small, the compression\ + \ will deteriorate. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_page_size_kb: + title: "Dictionary Page Size (KB)" + description: + "There is one dictionary page per column per row group\ + \ when dictionary encoding is used. The dictionary page size works\ + \ like the page size but for dictionary. Default: 1024 KB." + type: "integer" + default: 1024 + examples: + - 1024 + dictionary_encoding: + title: "Dictionary Encoding" + description: "Default: true." + type: "boolean" + default: true + order: 6 + s3_endpoint: + title: "Endpoint" + type: "string" + default: "" + description: + "Your S3 endpoint url. Read more here" + examples: + - "http://localhost:9000" + order: 7 + s3_path_format: + title: "S3 Path Format" + description: + "Format string on how data will be organized inside the S3\ + \ bucket directory. 
Read more here" + type: "string" + examples: + - "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" + order: 8 + file_name_pattern: + type: "string" + description: + "The pattern allows you to set the file-name format for the\ + \ S3 staging file(s)" + title: "S3 Filename pattern" + examples: + - "{date}" + - "{date:yyyy_MM}" + - "{timestamp}" + - "{part_number}" + - "{sync_id}" + order: 9 + destination-redis: + title: "Redis Destination Spec" + type: "object" + required: + - "host" + - "username" + - "port" + - "cache_type" + - "destinationType" + properties: + host: + title: "Host" + description: "Redis host to connect to." + type: "string" + examples: + - "localhost,127.0.0.1" + order: 1 + port: + title: "Port" + description: "Port of Redis." + type: "integer" + minimum: 0 + maximum: 65536 + default: 6379 + order: 2 + username: + title: "Username" + description: "Username associated with Redis." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with Redis." + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + type: "boolean" + description: + "Indicates whether SSL encryption protocol will be used to\ + \ connect to Redis. It is recommended to use SSL connection if possible." + default: false + order: 5 + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n
  • verify-full - This is\ + \ the most secure mode. Always require encryption and verifies the identity\ + \ of the source database server" + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + cache_type: + title: "Cache type" + type: "string" + default: "hash" + description: "Redis cache type to store data in." + enum: + - "hash" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "redis" + const: "redis" + enum: + - "redis" + order: 0 + type: "string" + destination-redis-update: + title: "Redis Destination Spec" + type: "object" + required: + - "host" + - "username" + - "port" + - "cache_type" + properties: + host: + title: "Host" + description: "Redis host to connect to." + type: "string" + examples: + - "localhost,127.0.0.1" + order: 1 + port: + title: "Port" + description: "Port of Redis." + type: "integer" + minimum: 0 + maximum: 65536 + default: 6379 + order: 2 + username: + title: "Username" + description: "Username associated with Redis." + type: "string" + order: 3 + password: + title: "Password" + description: "Password associated with Redis." + type: "string" + airbyte_secret: true + order: 4 + ssl: + title: "SSL Connection" + type: "boolean" + description: + "Indicates whether SSL encryption protocol will be used to\ + \ connect to Redis. It is recommended to use SSL connection if possible." + default: false + order: 5 + ssl_mode: + title: "SSL Modes" + description: + "SSL connection modes. \n
  • verify-full - This is\ + \ the most secure mode. Always require encryption and verifies the identity\ + \ of the source database server" + type: "object" + order: 6 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA Certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client Certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client Key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. If you do not add it - the\ + \ password will be generated automatically." + airbyte_secret: true + order: 4 + cache_type: + title: "Cache type" + type: "string" + default: "hash" + description: "Redis cache type to store data in." + enum: + - "hash" + order: 7 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-typesense: + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + - "destinationType" + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: + "Hostname of the Typesense instance without protocol. Accept\ + \ multiple hosts separated by comma." + order: 1 + port: + title: "Port" + type: "string" + description: + "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: + "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "integer" + description: "How many documents should be imported together. Default 1000" + order: 4 + path: + title: "Path" + type: "string" + description: "Path of the Typesense instance. Default is none" + order: 5 + destinationType: + title: "typesense" + const: "typesense" + enum: + - "typesense" + order: 0 + type: "string" + destination-typesense-update: + title: "Destination Typesense" + type: "object" + required: + - "api_key" + - "host" + properties: + api_key: + title: "API Key" + type: "string" + description: "Typesense API Key" + order: 0 + host: + title: "Host" + type: "string" + description: + "Hostname of the Typesense instance without protocol. 
Accept\ + \ multiple hosts separated by comma." + order: 1 + port: + title: "Port" + type: "string" + description: + "Port of the Typesense instance. Ex: 8108, 80, 443. Default\ + \ is 443" + order: 2 + protocol: + title: "Protocol" + type: "string" + description: + "Protocol of the Typesense instance. Ex: http or https. Default\ + \ is https" + order: 3 + batch_size: + title: "Batch size" + type: "integer" + description: "How many documents should be imported together. Default 1000" + order: 4 + path: + title: "Path" + type: "string" + description: "Path of the Typesense instance. Default is none" + order: 5 + destination-bigquery: + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_location" + - "dataset_id" + - "destinationType" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset. Read more here." + title: "Project ID" + group: "connection" + order: 0 + dataset_location: + type: "string" + description: + "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied. Read more here." 
+ title: "Dataset Location" + group: "connection" + order: 1 + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-southwest1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "europe-west7" + - "europe-west8" + - "europe-west9" + - "europe-west12" + - "me-central1" + - "me-central2" + - "me-west1" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "southamerica-east1" + - "southamerica-west1" + - "us-central1" + - "us-east1" + - "us-east2" + - "us-east3" + - "us-east4" + - "us-east5" + - "us-south1" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + dataset_id: + type: "string" + description: + "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" + group: "connection" + order: 2 + loading_method: + type: "object" + title: "Loading Method" + description: "The way data will be uploaded to BigQuery." + display_type: "radio" + group: "connection" + order: 3 + oneOf: + - title: "Batched Standard Inserts" + required: + - "method" + description: + "Direct loading using batched SQL INSERT statements. This\ + \ method uses the BigQuery driver to convert large INSERT statements\ + \ into file uploads automatically." + properties: + method: + type: "string" + const: "Standard" + enum: + - "Standard" + - title: "GCS Staging" + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses COPY INTO to load your data into BigQuery." 
+ required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + enum: + - "GCS Staging" + credential: + title: "Credential" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read\ + \ more here." + type: "object" + order: 1 + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + order: 0 + enum: + - "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + order: 1 + x-speakeasy-param-sensitive: true + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + order: 2 + x-speakeasy-param-sensitive: true + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the GCS bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + gcs_bucket_path: + title: "GCS Bucket Path" + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + order: 3 + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. By this select you can chose if these records should\ + \ be removed from GCS when migration has finished. The default \"\ + Delete all tmp files from GCS\" value is used if not set explicitly." 
+ title: "GCS Tmp Files Afterward Processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + order: 4 + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Service Account Key JSON (Required for cloud, optional for open-source)" + airbyte_secret: true + group: "connection" + order: 4 + always_show: true + x-speakeasy-param-sensitive: true + transformation_priority: + type: "string" + description: + "Interactive run type means that the query is executed as soon\ + \ as possible, and these queries count towards concurrent rate limit and\ + \ daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources\ + \ are available in the BigQuery shared resource pool, which usually occurs\ + \ within a few minutes. Batch queries don’t count towards your concurrent\ + \ rate limit. Read more about batch queries here. The default \"interactive\" value is used if not set explicitly." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + order: 5 + group: "advanced" + big_query_client_buffer_size_mb: + title: "Google BigQuery Client Chunk Size" + description: + "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\ + \ 15) for each table. The size that will be written by a single RPC. Written\ + \ data will be buffered and only flushed upon reaching this size or closing\ + \ the channel. The default 15MB value is used if not set explicitly. Read\ + \ more here." 
+ type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + order: 6 + group: "advanced" + raw_data_dataset: + type: "string" + description: "The dataset to write raw tables into (default: airbyte_internal)" + title: "Raw Table Dataset Name" + order: 7 + group: "advanced" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + group: "advanced" + destinationType: + title: "bigquery" + const: "bigquery" + enum: + - "bigquery" + order: 0 + type: "string" + groups: + - id: "connection" + title: "Connection" + - id: "advanced" + title: "Advanced" + destination-bigquery-update: + title: "BigQuery Destination Spec" + type: "object" + required: + - "project_id" + - "dataset_location" + - "dataset_id" + properties: + project_id: + type: "string" + description: + "The GCP project ID for the project containing the target BigQuery\ + \ dataset. Read more here." + title: "Project ID" + group: "connection" + order: 0 + dataset_location: + type: "string" + description: + "The location of the dataset. Warning: Changes made after creation\ + \ will not be applied. Read more here." 
+ title: "Dataset Location" + group: "connection" + order: 1 + enum: + - "US" + - "EU" + - "asia-east1" + - "asia-east2" + - "asia-northeast1" + - "asia-northeast2" + - "asia-northeast3" + - "asia-south1" + - "asia-south2" + - "asia-southeast1" + - "asia-southeast2" + - "australia-southeast1" + - "australia-southeast2" + - "europe-central1" + - "europe-central2" + - "europe-north1" + - "europe-southwest1" + - "europe-west1" + - "europe-west2" + - "europe-west3" + - "europe-west4" + - "europe-west6" + - "europe-west7" + - "europe-west8" + - "europe-west9" + - "europe-west12" + - "me-central1" + - "me-central2" + - "me-west1" + - "northamerica-northeast1" + - "northamerica-northeast2" + - "southamerica-east1" + - "southamerica-west1" + - "us-central1" + - "us-east1" + - "us-east2" + - "us-east3" + - "us-east4" + - "us-east5" + - "us-south1" + - "us-west1" + - "us-west2" + - "us-west3" + - "us-west4" + dataset_id: + type: "string" + description: + "The default BigQuery Dataset ID that tables are replicated\ + \ to if the source does not specify a namespace. Read more here." + title: "Default Dataset ID" + group: "connection" + order: 2 + loading_method: + type: "object" + title: "Loading Method" + description: "The way data will be uploaded to BigQuery." + display_type: "radio" + group: "connection" + order: 3 + oneOf: + - title: "Batched Standard Inserts" + required: + - "method" + description: + "Direct loading using batched SQL INSERT statements. This\ + \ method uses the BigQuery driver to convert large INSERT statements\ + \ into file uploads automatically." + properties: + method: + type: "string" + const: "Standard" + enum: + - "Standard" + - title: "GCS Staging" + description: + "Writes large batches of records to a file, uploads the file\ + \ to GCS, then uses COPY INTO to load your data into BigQuery." 
+ required: + - "method" + - "gcs_bucket_name" + - "gcs_bucket_path" + - "credential" + properties: + method: + type: "string" + const: "GCS Staging" + enum: + - "GCS Staging" + credential: + title: "Credential" + description: + "An HMAC key is a type of credential and can be associated\ + \ with a service account or a user account in Cloud Storage. Read\ + \ more here." + type: "object" + order: 1 + oneOf: + - title: "HMAC key" + required: + - "credential_type" + - "hmac_key_access_id" + - "hmac_key_secret" + properties: + credential_type: + type: "string" + const: "HMAC_KEY" + order: 0 + enum: + - "HMAC_KEY" + hmac_key_access_id: + type: "string" + description: + "HMAC key access ID. When linked to a service account,\ + \ this ID is 61 characters long; when linked to a user account,\ + \ it is 24 characters long." + title: "HMAC Key Access ID" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234" + order: 1 + hmac_key_secret: + type: "string" + description: + "The corresponding secret for the access ID. It\ + \ is a 40-character base-64 encoded string." + title: "HMAC Key Secret" + airbyte_secret: true + examples: + - "1234567890abcdefghij1234567890ABCDEFGHIJ" + order: 2 + gcs_bucket_name: + title: "GCS Bucket Name" + type: "string" + description: + "The name of the GCS bucket. Read more here." + examples: + - "airbyte_sync" + order: 2 + gcs_bucket_path: + title: "GCS Bucket Path" + description: "Directory under the GCS bucket where data will be written." + type: "string" + examples: + - "data_sync/test" + order: 3 + keep_files_in_gcs-bucket: + type: "string" + description: + "This upload method is supposed to temporary store records\ + \ in GCS bucket. By this select you can chose if these records should\ + \ be removed from GCS when migration has finished. The default \"\ + Delete all tmp files from GCS\" value is used if not set explicitly." 
+ title: "GCS Tmp Files Afterward Processing" + default: "Delete all tmp files from GCS" + enum: + - "Delete all tmp files from GCS" + - "Keep all tmp files in GCS" + order: 4 + credentials_json: + type: "string" + description: + "The contents of the JSON service account key. Check out the\ + \ docs if you need help generating this key. Default credentials will\ + \ be used if this field is left empty." + title: "Service Account Key JSON (Required for cloud, optional for open-source)" + airbyte_secret: true + group: "connection" + order: 4 + always_show: true + transformation_priority: + type: "string" + description: + "Interactive run type means that the query is executed as soon\ + \ as possible, and these queries count towards concurrent rate limit and\ + \ daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources\ + \ are available in the BigQuery shared resource pool, which usually occurs\ + \ within a few minutes. Batch queries don’t count towards your concurrent\ + \ rate limit. Read more about batch queries here. The default \"interactive\" value is used if not set explicitly." + title: "Transformation Query Run Type" + default: "interactive" + enum: + - "interactive" + - "batch" + order: 5 + group: "advanced" + big_query_client_buffer_size_mb: + title: "Google BigQuery Client Chunk Size" + description: + "Google BigQuery client's chunk (buffer) size (MIN=1, MAX =\ + \ 15) for each table. The size that will be written by a single RPC. Written\ + \ data will be buffered and only flushed upon reaching this size or closing\ + \ the channel. The default 15MB value is used if not set explicitly. Read\ + \ more here." 
+ type: "integer" + minimum: 1 + maximum: 15 + default: 15 + examples: + - "15" + order: 6 + group: "advanced" + raw_data_dataset: + type: "string" + description: "The dataset to write raw tables into (default: airbyte_internal)" + title: "Raw Table Dataset Name" + order: 7 + group: "advanced" + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 8 + group: "advanced" + groups: + - id: "connection" + title: "Connection" + - id: "advanced" + title: "Advanced" + destination-elasticsearch: + title: "Elasticsearch Connection Configuration" + type: "object" + required: + - "endpoint" + - "destinationType" + properties: + endpoint: + title: "Server Endpoint" + type: "string" + description: "The full url of the Elasticsearch server" + upsert: + type: "boolean" + title: "Upsert Records" + description: + "If a primary key identifier is defined in the source, an upsert\ + \ will be performed using the primary key value as the elasticsearch doc\ + \ id. Does not support composite primary keys." 
+ default: true + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + x-speakeasy-param-sensitive: true + authenticationMethod: + title: "Authentication Method" + type: "object" + description: "The type of authentication to be used" + oneOf: + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + enum: + - "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + enum: + - "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + enum: + - "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true + x-speakeasy-param-sensitive: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "elasticsearch" + const: "elasticsearch" + enum: + - "elasticsearch" + order: 0 + type: "string" + destination-elasticsearch-update: + title: "Elasticsearch Connection Configuration" + type: "object" + required: + - "endpoint" + properties: + endpoint: + title: "Server Endpoint" + type: "string" + description: "The full url of the Elasticsearch server" + upsert: + type: "boolean" + title: "Upsert Records" + description: + "If a primary key identifier is defined in the source, an upsert\ + \ will be performed using the primary key value as the elasticsearch doc\ + \ id. Does not support composite primary keys." 
+ default: true + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + authenticationMethod: + title: "Authentication Method" + type: "object" + description: "The type of authentication to be used" + oneOf: + - title: "None" + additionalProperties: false + description: "No authentication will be used" + required: + - "method" + properties: + method: + type: "string" + const: "none" + enum: + - "none" + - title: "Api Key/Secret" + additionalProperties: false + description: "Use a api key and secret combination to authenticate" + required: + - "method" + - "apiKeyId" + - "apiKeySecret" + properties: + method: + type: "string" + const: "secret" + enum: + - "secret" + apiKeyId: + title: "API Key ID" + description: + "The Key ID to used when accessing an enterprise Elasticsearch\ + \ instance." + type: "string" + apiKeySecret: + title: "API Key Secret" + description: "The secret associated with the API Key ID." + type: "string" + airbyte_secret: true + - title: "Username/Password" + additionalProperties: false + description: "Basic auth header with a username and password" + required: + - "method" + - "username" + - "password" + properties: + method: + type: "string" + const: "basic" + enum: + - "basic" + username: + title: "Username" + description: + "Basic auth username to access a secure Elasticsearch\ + \ server" + type: "string" + password: + title: "Password" + description: + "Basic auth password to access a secure Elasticsearch\ + \ server" + type: "string" + airbyte_secret: true + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-azure-blob-storage: + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + - "destinationType" + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: + "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." 
+ airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + x-speakeasy-param-sensitive: true + azure_blob_storage_output_buffer_size: + title: "Azure Blob Storage output buffer size (Megabytes)" + type: "integer" + description: + "The amount of megabytes to buffer for the output stream to\ + \ Azure. This will impact memory footprint on workers, but may need adjustment\ + \ for performance and appropriate block size in Azure." + minimum: 1 + maximum: 2047 + default: 5 + examples: + - 5 + azure_blob_storage_spill_size: + title: "Azure Blob Storage file spill size" + type: "integer" + description: + "The amount of megabytes after which the connector should spill\ + \ the records in a new blob object. Make sure to configure size greater\ + \ than individual records. Enter 0 if not applicable" + default: 500 + examples: + - 500 + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + enum: + - "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + enum: + - "JSONL" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." 
+ destinationType: + title: "azure-blob-storage" + const: "azure-blob-storage" + enum: + - "azure-blob-storage" + order: 0 + type: "string" + destination-azure-blob-storage-update: + title: "AzureBlobStorage Destination Spec" + type: "object" + required: + - "azure_blob_storage_account_name" + - "azure_blob_storage_account_key" + - "format" + properties: + azure_blob_storage_endpoint_domain_name: + title: "Endpoint Domain Name" + type: "string" + default: "blob.core.windows.net" + description: + "This is Azure Blob Storage endpoint domain name. Leave default\ + \ value (or leave it empty if run container from command line) to use\ + \ Microsoft native from example." + examples: + - "blob.core.windows.net" + azure_blob_storage_container_name: + title: "Azure blob storage container (Bucket) Name" + type: "string" + description: + "The name of the Azure blob storage container. If not exists\ + \ - will be created automatically. May be empty, then will be created\ + \ automatically airbytecontainer+timestamp" + examples: + - "airbytetescontainername" + azure_blob_storage_account_name: + title: "Azure Blob Storage account name" + type: "string" + description: "The account's name of the Azure Blob Storage." + examples: + - "airbyte5storage" + azure_blob_storage_account_key: + title: "Azure Blob Storage account key" + description: "The Azure blob storage account key." + airbyte_secret: true + type: "string" + examples: + - "Z8ZkZpteggFx394vm+PJHnGTvdRncaYS+JhLKdj789YNmD+iyGTnG+PV+POiuYNhBg/ACS+LKjd%4FG3FHGN12Nd==" + azure_blob_storage_output_buffer_size: + title: "Azure Blob Storage output buffer size (Megabytes)" + type: "integer" + description: + "The amount of megabytes to buffer for the output stream to\ + \ Azure. This will impact memory footprint on workers, but may need adjustment\ + \ for performance and appropriate block size in Azure." 
+ minimum: 1 + maximum: 2047 + default: 5 + examples: + - 5 + azure_blob_storage_spill_size: + title: "Azure Blob Storage file spill size" + type: "integer" + description: + "The amount of megabytes after which the connector should spill\ + \ the records in a new blob object. Make sure to configure size greater\ + \ than individual records. Enter 0 if not applicable" + default: 500 + examples: + - 500 + format: + title: "Output Format" + type: "object" + description: "Output data format" + oneOf: + - title: "CSV: Comma-Separated Values" + required: + - "format_type" + - "flattening" + properties: + format_type: + type: "string" + const: "CSV" + enum: + - "CSV" + flattening: + type: "string" + title: "Normalization (Flattening)" + description: + "Whether the input json data should be normalized (flattened)\ + \ in the output CSV. Please refer to docs for details." + default: "No flattening" + enum: + - "No flattening" + - "Root level flattening" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." + - title: "JSON Lines: newline-delimited JSON" + required: + - "format_type" + properties: + format_type: + type: "string" + const: "JSONL" + enum: + - "JSONL" + file_extension: + title: "File Extension" + type: "boolean" + default: false + description: "Add file extensions to the output file." 
+ destination-pgvector: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." 
+ required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + x-speakeasy-param-sensitive: true + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Postgres Connection" + type: "object" + properties: + host: + title: "Host" + description: "Enter the account name you want to use to access the database." 
+ order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + port: + title: "Port" + description: "Enter the port you want to use to access the database" + default: 5432 + order: 2 + examples: + - "5432" + type: "integer" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + default: "public" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + x-speakeasy-param-sensitive: true + required: + - "password" + required: + - "host" + - "database" + - "username" + - "credentials" + description: "Postgres can be used to store vector data and retrieve embeddings." 
+ group: "indexing" + destinationType: + title: "pgvector" + const: "pgvector" + enum: + - "pgvector" + order: 0 + type: "string" + required: + - "embedding" + - "processing" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-pgvector-update: + title: "Destination Config" + description: + "The configuration model for the Vector DB based destinations.\ + \ This model is used to generate the UI for the destination configuration,\n\ + as well as to provide type safety for the configuration passed to the destination.\n\ + \nThe configuration model is composed of four parts:\n* Processing configuration\n\ + * Embedding configuration\n* Indexing configuration\n* Advanced configuration\n\ + \nProcessing, embedding and advanced configuration are provided by this base\ + \ class, while the indexing configuration is provided by the destination connector\ + \ in the sub class." + type: "object" + properties: + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + - "mode" + description: + "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + - "mode" + description: "Use the Cohere API to embed text." 
+ - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: + "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + required: + - "mode" + - title: "Azure OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "azure_openai" + const: "azure_openai" + enum: + - "azure_openai" + type: "string" + openai_key: + title: "Azure OpenAI API key" + description: + "The API key for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + airbyte_secret: true + type: "string" + api_base: + title: "Resource base URL" + description: + "The base URL for your Azure OpenAI resource. You can\ + \ find this in the Azure portal under your Azure OpenAI resource" + examples: + - "https://your-resource-name.openai.azure.com" + type: "string" + deployment: + title: "Deployment" + description: + "The deployment for your Azure OpenAI resource. You\ + \ can find this in the Azure portal under your Azure OpenAI resource" + examples: + - "your-resource-name" + type: "string" + required: + - "openai_key" + - "api_base" + - "deployment" + - "mode" + description: + "Use the Azure-hosted OpenAI API to embed text. This option\ + \ is using the text-embedding-ada-002 model with 1536 embedding dimensions." 
+ - title: "OpenAI-compatible" + type: "object" + properties: + mode: + title: "Mode" + default: "openai_compatible" + const: "openai_compatible" + enum: + - "openai_compatible" + type: "string" + api_key: + title: "API key" + default: "" + airbyte_secret: true + type: "string" + base_url: + title: "Base URL" + description: "The base URL for your OpenAI-compatible service" + examples: + - "https://your-service-name.com" + type: "string" + model_name: + title: "Model name" + description: "The name of the model to use for embedding" + default: "text-embedding-ada-002" + examples: + - "text-embedding-ada-002" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "base_url" + - "dimensions" + - "mode" + description: + "Use a service that's compatible with the OpenAI API to embed\ + \ text." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: + "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + minimum: 1 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: + "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: + "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: + "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + text_splitter: + title: "Text splitter" + description: "Split text fields into chunks based on the specified method." + type: "object" + oneOf: + - title: "By Separator" + type: "object" + properties: + mode: + title: "Mode" + default: "separator" + const: "separator" + enum: + - "separator" + type: "string" + separators: + title: "Separators" + description: + "List of separator strings to split text fields by.\ + \ The separator itself needs to be wrapped in double quotes,\ + \ e.g. to split by the dot character, use \".\". To split by\ + \ a newline, use \"\\n\"." 
+ default: + - "\"\\n\\n\"" + - "\"\\n\"" + - '" "' + - '""' + type: "array" + items: + type: "string" + keep_separator: + title: "Keep separator" + description: "Whether to keep the separator in the resulting chunks" + default: false + type: "boolean" + description: + "Split the text by the list of separators until the chunk\ + \ size is reached, using the earlier mentioned separators where\ + \ possible. This is useful for splitting text fields by paragraphs,\ + \ sentences, words, etc." + required: + - "mode" + - title: "By Markdown header" + type: "object" + properties: + mode: + title: "Mode" + default: "markdown" + const: "markdown" + enum: + - "markdown" + type: "string" + split_level: + title: "Split level" + description: + "Level of markdown headers to split text fields by.\ + \ Headings down to the specified level will be used as split\ + \ points" + default: 1 + minimum: 1 + maximum: 6 + type: "integer" + description: + "Split the text by Markdown headers down to the specified\ + \ header level. If the chunk size fits multiple sections, they will\ + \ be combined into a single chunk." + required: + - "mode" + - title: "By Programming Language" + type: "object" + properties: + mode: + title: "Mode" + default: "code" + const: "code" + enum: + - "code" + type: "string" + language: + title: "Language" + description: + "Split code in suitable places based on the programming\ + \ language" + enum: + - "cpp" + - "go" + - "java" + - "js" + - "php" + - "proto" + - "python" + - "rst" + - "ruby" + - "rust" + - "scala" + - "swift" + - "markdown" + - "latex" + - "html" + - "sol" + type: "string" + required: + - "language" + - "mode" + description: + "Split the text by suitable delimiters based on the programming\ + \ language. This is useful for splitting code into chunks." + field_name_mappings: + title: "Field name mappings" + description: + "List of fields to rename. 
Not applicable for nested fields,\ + \ but can be used to rename fields already flattened via dot notation." + default: [] + type: "array" + items: + title: "FieldNameMappingConfigModel" + type: "object" + properties: + from_field: + title: "From field name" + description: "The field name in the source" + type: "string" + to_field: + title: "To field name" + description: "The field name to use in the destination" + type: "string" + required: + - "from_field" + - "to_field" + required: + - "chunk_size" + group: "processing" + omit_raw_text: + title: "Do not store raw text" + description: + "Do not store the text that gets embedded along with the vector\ + \ and the metadata in the destination. If set to true, only the vector\ + \ and the metadata will be stored - in this case raw text for LLM use\ + \ cases needs to be retrieved from another source." + default: false + group: "advanced" + type: "boolean" + indexing: + title: "Postgres Connection" + type: "object" + properties: + host: + title: "Host" + description: "Enter the account name you want to use to access the database." 
+ order: 1 + examples: + - "AIRBYTE_ACCOUNT" + type: "string" + port: + title: "Port" + description: "Enter the port you want to use to access the database" + default: 5432 + order: 2 + examples: + - "5432" + type: "integer" + database: + title: "Database" + description: + "Enter the name of the database that you want to sync data\ + \ into" + order: 4 + examples: + - "AIRBYTE_DATABASE" + type: "string" + default_schema: + title: "Default Schema" + description: "Enter the name of the default schema" + default: "public" + order: 5 + examples: + - "AIRBYTE_SCHEMA" + type: "string" + username: + title: "Username" + description: + "Enter the name of the user you want to use to access the\ + \ database" + order: 6 + examples: + - "AIRBYTE_USER" + type: "string" + credentials: + title: "Credentials" + type: "object" + properties: + password: + title: "Password" + description: "Enter the password you want to use to access the database" + airbyte_secret: true + examples: + - "AIRBYTE_PASSWORD" + order: 7 + type: "string" + required: + - "password" + required: + - "host" + - "database" + - "username" + - "credentials" + description: "Postgres can be used to store vector data and retrieve embeddings." + group: "indexing" + required: + - "embedding" + - "processing" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + - id: "advanced" + title: "Advanced" + destination-yellowbrick: + title: "Yellowbrick Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." 
+ type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." 
+ oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." 
+ type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "yellowbrick" + const: "yellowbrick" + enum: + - "yellowbrick" + order: 0 + type: "string" + destination-yellowbrick-update: + title: "Yellowbrick Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." 
+ type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." + required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." 
+ required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." + required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." 
+ title: "JDBC URL Params" + type: "string" + order: 8 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + destination-postgres: + title: "Postgres Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + - "destinationType" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." + type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." 
+ type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + x-speakeasy-param-sensitive: true + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + x-speakeasy-param-sensitive: true + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + x-speakeasy-param-sensitive: true + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + x-speakeasy-param-sensitive: true + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into" + title: "Raw table schema (defaults to airbyte_internal)" + order: 9 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 10 + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. 
WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 11 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + x-speakeasy-param-sensitive: true + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + x-speakeasy-param-sensitive: true + destinationType: + title: "postgres" + const: "postgres" + enum: + - "postgres" + order: 0 + type: "string" + destination-postgres-update: + title: "Postgres Destination Spec" + type: "object" + required: + - "host" + - "port" + - "username" + - "database" + - "schema" + properties: + host: + title: "Host" + description: "Hostname of the database." + type: "string" + order: 0 + port: + title: "Port" + description: "Port of the database." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 5432 + examples: + - "5432" + order: 1 + database: + title: "DB Name" + description: "Name of the database." + type: "string" + order: 2 + schema: + title: "Default Schema" + description: + "The default schema tables are written to if the source does\ + \ not specify a namespace. The usual value for this field is \"public\"\ + ." + type: "string" + examples: + - "public" + default: "public" + order: 3 + username: + title: "User" + description: "Username to use to access the database." + type: "string" + order: 4 + password: + title: "Password" + description: "Password associated with the username." + type: "string" + airbyte_secret: true + order: 5 + ssl: + title: "SSL Connection" + description: + "Encrypt data using SSL. When activating SSL, please select\ + \ one of the connection modes." + type: "boolean" + default: false + order: 6 + ssl_mode: + title: "SSL modes" + description: + "SSL connection modes. \n disable - Chose this mode\ + \ to disable encryption of communication between Airbyte and destination\ + \ database\n allow - Chose this mode to enable encryption only\ + \ when required by the source database\n prefer - Chose this mode\ + \ to allow unencrypted connection only if the source database does not\ + \ support encryption\n require - Chose this mode to always require\ + \ encryption. If the source database server does not support encryption,\ + \ connection will fail\n verify-ca - Chose this mode to always\ + \ require encryption and to verify that the source database server has\ + \ a valid SSL certificate\n verify-full - This is the most secure\ + \ mode. Chose this mode to always require encryption and to verify the\ + \ identity of the source database server\n See more information - in the\ + \ docs." + type: "object" + order: 7 + oneOf: + - title: "disable" + additionalProperties: false + description: "Disable SSL." 
+ required: + - "mode" + properties: + mode: + type: "string" + const: "disable" + enum: + - "disable" + default: "disable" + order: 0 + - title: "allow" + additionalProperties: false + description: "Allow SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "allow" + enum: + - "allow" + default: "allow" + order: 0 + - title: "prefer" + additionalProperties: false + description: "Prefer SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "prefer" + enum: + - "prefer" + default: "prefer" + order: 0 + - title: "require" + additionalProperties: false + description: "Require SSL mode." + required: + - "mode" + properties: + mode: + type: "string" + const: "require" + enum: + - "require" + default: "require" + order: 0 + - title: "verify-ca" + additionalProperties: false + description: "Verify-ca SSL mode." + required: + - "mode" + - "ca_certificate" + properties: + mode: + type: "string" + const: "verify-ca" + enum: + - "verify-ca" + default: "verify-ca" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + - title: "verify-full" + additionalProperties: false + description: "Verify-full SSL mode." 
+ required: + - "mode" + - "ca_certificate" + - "client_certificate" + - "client_key" + properties: + mode: + type: "string" + const: "verify-full" + enum: + - "verify-full" + default: "verify-full" + order: 0 + ca_certificate: + type: "string" + title: "CA certificate" + description: "CA certificate" + airbyte_secret: true + multiline: true + order: 1 + client_certificate: + type: "string" + title: "Client certificate" + description: "Client certificate" + airbyte_secret: true + multiline: true + order: 2 + client_key: + type: "string" + title: "Client key" + description: "Client key" + airbyte_secret: true + multiline: true + order: 3 + client_key_password: + type: "string" + title: "Client key password" + description: + "Password for keystorage. This field is optional. If\ + \ you do not add it - the password will be generated automatically." + airbyte_secret: true + order: 4 + jdbc_url_params: + description: + "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + title: "JDBC URL Params" + type: "string" + order: 8 + raw_data_schema: + type: "string" + description: "The schema to write raw tables into" + title: "Raw table schema (defaults to airbyte_internal)" + order: 9 + disable_type_dedupe: + type: "boolean" + default: false + description: + "Disable Writing Final Tables. WARNING! The data format in\ + \ _airbyte_data is likely stable but there are no guarantees that other\ + \ metadata columns will remain the same in future versions" + title: + "Disable Final Tables. (WARNING! Unstable option; Columns in raw\ + \ table schema might change between versions)" + order: 10 + drop_cascade: + type: "boolean" + default: false + description: + "Drop tables with CASCADE. WARNING! This will delete all data\ + \ in all dependent objects (views, etc.). Use with caution. 
This option\ + \ is intended for usecases which can easily rebuild the dependent objects." + title: "Drop tables with CASCADE. (WARNING! Risk of unrecoverable data loss)" + order: 11 + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: + "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." 
+ type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: + "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: + "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: + "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: + "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + Source7shiftsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-7shifts" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_7shifts + x-speakeasy-param-suppress-computed-diff: true + SourceAhaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-aha" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Aha + x-speakeasy-param-suppress-computed-diff: true + SourceAirbyteCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-airbyte" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Airbyte + x-speakeasy-param-suppress-computed-diff: true + SourceAircallCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-aircall" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Aircall + x-speakeasy-param-suppress-computed-diff: true + SourceAirtableCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-airtable" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Airtable + x-speakeasy-param-suppress-computed-diff: true + SourceAlgoliaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-algolia" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Algolia + x-speakeasy-param-suppress-computed-diff: true + SourceAmazonAdsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-amazon-ads" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_AmazonAds + x-speakeasy-param-suppress-computed-diff: true + SourceAmazonSellerPartnerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-amazon-seller-partner" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_AmazonSellerPartner + x-speakeasy-param-suppress-computed-diff: true + SourceAmazonSqsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-amazon-sqs" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_AmazonSqs + x-speakeasy-param-suppress-computed-diff: true + SourceAmplitudeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. 
dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-amplitude" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Amplitude + x-speakeasy-param-suppress-computed-diff: true + SourceApifyDatasetCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-apify-dataset" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ApifyDataset + x-speakeasy-param-suppress-computed-diff: true + SourceAppcuesCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-appcues" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Appcues + x-speakeasy-param-suppress-computed-diff: true + SourceAppfiguresCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-appfigures" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Appfigures + x-speakeasy-param-suppress-computed-diff: true + SourceAppfollowCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-appfollow" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Appfollow + x-speakeasy-param-suppress-computed-diff: true + SourceAsanaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-asana" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Asana + x-speakeasy-param-suppress-computed-diff: true + SourceAuth0CreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-auth0" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Auth0 + x-speakeasy-param-suppress-computed-diff: true + SourceAwsCloudtrailCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-aws-cloudtrail" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_AwsCloudtrail + x-speakeasy-param-suppress-computed-diff: true + SourceAzureBlobStorageCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-azure-blob-storage" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_AzureBlobStorage + x-speakeasy-param-suppress-computed-diff: true + SourceAzureTableCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-azure-table" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_AzureTable + x-speakeasy-param-suppress-computed-diff: true + SourceBambooHrCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-bamboo-hr" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_BambooHr + x-speakeasy-param-suppress-computed-diff: true + SourceBasecampCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-basecamp" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Basecamp + x-speakeasy-param-suppress-computed-diff: true + SourceBeamerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-beamer" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Beamer + x-speakeasy-param-suppress-computed-diff: true + SourceBigqueryCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-bigquery" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Bigquery + x-speakeasy-param-suppress-computed-diff: true + SourceBingAdsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-bing-ads" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_BingAds + x-speakeasy-param-suppress-computed-diff: true + SourceBitlyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-bitly" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Bitly + x-speakeasy-param-suppress-computed-diff: true + SourceBraintreeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-braintree" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Braintree + x-speakeasy-param-suppress-computed-diff: true + SourceBrazeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-braze" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Braze + x-speakeasy-param-suppress-computed-diff: true + SourceBreezyHrCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-breezy-hr" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_BreezyHr + x-speakeasy-param-suppress-computed-diff: true + SourceBrevoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-brevo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Brevo + x-speakeasy-param-suppress-computed-diff: true + SourceBuildkiteCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-buildkite" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Buildkite + x-speakeasy-param-suppress-computed-diff: true + SourceBuzzsproutCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-buzzsprout" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Buzzsprout + x-speakeasy-param-suppress-computed-diff: true + SourceCalendlyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-calendly" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Calendly + x-speakeasy-param-suppress-computed-diff: true + SourceCannyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-canny" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Canny + x-speakeasy-param-suppress-computed-diff: true + SourceCartCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-cart" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Cart + x-speakeasy-param-suppress-computed-diff: true + SourceChameleonCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-chameleon" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Chameleon + x-speakeasy-param-suppress-computed-diff: true + SourceChargebeeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-chargebee" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Chargebee + x-speakeasy-param-suppress-computed-diff: true + SourceChartmogulCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-chartmogul" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Chartmogul + x-speakeasy-param-suppress-computed-diff: true + SourceCimisCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-cimis" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Cimis + x-speakeasy-param-suppress-computed-diff: true + SourceClazarCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-clazar" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Clazar + x-speakeasy-param-suppress-computed-diff: true + SourceClickhouseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-clickhouse" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Clickhouse + x-speakeasy-param-suppress-computed-diff: true + SourceClickupApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-clickup-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ClickupApi + x-speakeasy-param-suppress-computed-diff: true + SourceClockifyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-clockify" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Clockify + x-speakeasy-param-suppress-computed-diff: true + SourceCloseComCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-close-com" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_CloseCom + x-speakeasy-param-suppress-computed-diff: true + SourceCodaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-coda" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Coda + x-speakeasy-param-suppress-computed-diff: true + SourceCoinApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-coin-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_CoinApi + x-speakeasy-param-suppress-computed-diff: true + SourceCoinmarketcapCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-coinmarketcap" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Coinmarketcap + x-speakeasy-param-suppress-computed-diff: true + SourceConfigcatCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-configcat" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Configcat + x-speakeasy-param-suppress-computed-diff: true + SourceConfluenceCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-confluence" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Confluence + x-speakeasy-param-suppress-computed-diff: true + SourceConvexCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-convex" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Convex + x-speakeasy-param-suppress-computed-diff: true + SourceCustomerIoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-customer-io" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_CustomerIo + x-speakeasy-param-suppress-computed-diff: true + SourceDatadogCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-datadog" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Datadog + x-speakeasy-param-suppress-computed-diff: true + SourceDatascopeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-datascope" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Datascope + x-speakeasy-param-suppress-computed-diff: true + SourceDbtCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-dbt" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Dbt + x-speakeasy-param-suppress-computed-diff: true + SourceDelightedCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-delighted" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Delighted + x-speakeasy-param-suppress-computed-diff: true + SourceDixaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-dixa" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Dixa + x-speakeasy-param-suppress-computed-diff: true + SourceDockerhubCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-dockerhub" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Dockerhub + x-speakeasy-param-suppress-computed-diff: true + SourceDremioCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-dremio" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Dremio + x-speakeasy-param-suppress-computed-diff: true + SourceDropboxSignCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-dropbox-sign" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_DropboxSign + x-speakeasy-param-suppress-computed-diff: true + SourceDynamodbCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-dynamodb" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Dynamodb + x-speakeasy-param-suppress-computed-diff: true + SourceEmailoctopusCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-emailoctopus" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Emailoctopus + x-speakeasy-param-suppress-computed-diff: true + SourceEventbriteCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-eventbrite" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Eventbrite + x-speakeasy-param-suppress-computed-diff: true + SourceExchangeRatesCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-exchange-rates" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ExchangeRates + x-speakeasy-param-suppress-computed-diff: true + SourceEzofficeinventoryCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-ezofficeinventory" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Ezofficeinventory + x-speakeasy-param-suppress-computed-diff: true + SourceFacebookMarketingCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-facebook-marketing" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_FacebookMarketing + x-speakeasy-param-suppress-computed-diff: true + SourceFakerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-faker" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Faker + x-speakeasy-param-suppress-computed-diff: true + SourceFaunaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-fauna" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Fauna + x-speakeasy-param-suppress-computed-diff: true + SourceFileCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-file" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_File + x-speakeasy-param-suppress-computed-diff: true + SourceFireboltCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-firebolt" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Firebolt + x-speakeasy-param-suppress-computed-diff: true + SourceFleetioCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-fleetio" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Fleetio + x-speakeasy-param-suppress-computed-diff: true + SourceFreshcallerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-freshcaller" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Freshcaller + x-speakeasy-param-suppress-computed-diff: true + SourceFreshchatCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-freshchat" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Freshchat + x-speakeasy-param-suppress-computed-diff: true + SourceFreshdeskCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-freshdesk" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Freshdesk + x-speakeasy-param-suppress-computed-diff: true + SourceFreshsalesCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-freshsales" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Freshsales + x-speakeasy-param-suppress-computed-diff: true + SourceFrontCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-front" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Front + x-speakeasy-param-suppress-computed-diff: true + SourceGainsightPxCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-gainsight-px" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_GainsightPx + x-speakeasy-param-suppress-computed-diff: true + SourceGcsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-gcs" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Gcs + x-speakeasy-param-suppress-computed-diff: true + SourceGetlagoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-getlago" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Getlago + x-speakeasy-param-suppress-computed-diff: true + SourceGithubCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-github" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Github + x-speakeasy-param-suppress-computed-diff: true + SourceGitlabCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-gitlab" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Gitlab + x-speakeasy-param-suppress-computed-diff: true + SourceGlassfrogCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-glassfrog" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Glassfrog + x-speakeasy-param-suppress-computed-diff: true + SourceGnewsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-gnews" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Gnews + x-speakeasy-param-suppress-computed-diff: true + SourceGoldcastCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-goldcast" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Goldcast + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleAdsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-ads" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleAds + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleAnalyticsDataApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-analytics-data-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleAnalyticsDataApi + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleDirectoryCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-directory" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleDirectory + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleDriveCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-drive" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_GoogleDrive + x-speakeasy-param-suppress-computed-diff: true + SourceGooglePagespeedInsightsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-pagespeed-insights" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GooglePagespeedInsights + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleSearchConsoleCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-search-console" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleSearchConsole + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleSheetsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-sheets" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleSheets + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleTasksCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-tasks" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleTasks + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleWebfontsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-google-webfonts" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_GoogleWebfonts + x-speakeasy-param-suppress-computed-diff: true + SourceGreenhouseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-greenhouse" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Greenhouse + x-speakeasy-param-suppress-computed-diff: true + SourceGridlyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-gridly" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Gridly + x-speakeasy-param-suppress-computed-diff: true + SourceGuruCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-guru" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Guru + x-speakeasy-param-suppress-computed-diff: true + SourceHardcodedRecordsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-hardcoded-records" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_HardcodedRecords + x-speakeasy-param-suppress-computed-diff: true + SourceHarvestCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-harvest" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Harvest + x-speakeasy-param-suppress-computed-diff: true + SourceHeightCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-height" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Height + x-speakeasy-param-suppress-computed-diff: true + SourceHibobCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-hibob" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Hibob + x-speakeasy-param-suppress-computed-diff: true + SourceHighLevelCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-high-level" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_HighLevel + x-speakeasy-param-suppress-computed-diff: true + SourceHubplannerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-hubplanner" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Hubplanner + x-speakeasy-param-suppress-computed-diff: true + SourceHubspotCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-hubspot" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Hubspot + x-speakeasy-param-suppress-computed-diff: true + SourceInsightlyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-insightly" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Insightly + x-speakeasy-param-suppress-computed-diff: true + SourceInstagramCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-instagram" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Instagram + x-speakeasy-param-suppress-computed-diff: true + SourceInstatusCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-instatus" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Instatus + x-speakeasy-param-suppress-computed-diff: true + SourceIntercomCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-intercom" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Intercom + x-speakeasy-param-suppress-computed-diff: true + SourceIp2whoisCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-ip2whois" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Ip2whois + x-speakeasy-param-suppress-computed-diff: true + SourceIterableCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-iterable" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Iterable + x-speakeasy-param-suppress-computed-diff: true + SourceJiraCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-jira" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Jira + x-speakeasy-param-suppress-computed-diff: true + SourceJotformCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-jotform" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Jotform + x-speakeasy-param-suppress-computed-diff: true + SourceK6CloudCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-k6-cloud" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_K6Cloud + x-speakeasy-param-suppress-computed-diff: true + SourceKissmetricsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-kissmetrics" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Kissmetrics + x-speakeasy-param-suppress-computed-diff: true + SourceKlarnaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-klarna" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Klarna + x-speakeasy-param-suppress-computed-diff: true + SourceKlaviyoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-klaviyo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Klaviyo + x-speakeasy-param-suppress-computed-diff: true + SourceKyveCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-kyve" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Kyve + x-speakeasy-param-suppress-computed-diff: true + SourceLaunchdarklyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-launchdarkly" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Launchdarkly + x-speakeasy-param-suppress-computed-diff: true + SourceLeadfeederCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-leadfeeder" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Leadfeeder + x-speakeasy-param-suppress-computed-diff: true + SourceLemlistCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-lemlist" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Lemlist + x-speakeasy-param-suppress-computed-diff: true + SourceLeverHiringCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-lever-hiring" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_LeverHiring + x-speakeasy-param-suppress-computed-diff: true + SourceLinkedinAdsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-linkedin-ads" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_LinkedinAds + x-speakeasy-param-suppress-computed-diff: true + SourceLinkedinPagesCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-linkedin-pages" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_LinkedinPages + x-speakeasy-param-suppress-computed-diff: true + SourceLinnworksCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-linnworks" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Linnworks + x-speakeasy-param-suppress-computed-diff: true + SourceLobCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-lob" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Lob + x-speakeasy-param-suppress-computed-diff: true + SourceLokaliseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-lokalise" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Lokalise + x-speakeasy-param-suppress-computed-diff: true + SourceLookerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-looker" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Looker + x-speakeasy-param-suppress-computed-diff: true + SourceLumaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-luma" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Luma + x-speakeasy-param-suppress-computed-diff: true + SourceMailchimpCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mailchimp" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mailchimp + x-speakeasy-param-suppress-computed-diff: true + SourceMailgunCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mailgun" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mailgun + x-speakeasy-param-suppress-computed-diff: true + SourceMailjetSmsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mailjet-sms" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_MailjetSms + x-speakeasy-param-suppress-computed-diff: true + SourceMarketoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-marketo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Marketo + x-speakeasy-param-suppress-computed-diff: true + SourceMetabaseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-metabase" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Metabase + x-speakeasy-param-suppress-computed-diff: true + SourceMicrosoftOnedriveCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-microsoft-onedrive" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_MicrosoftOnedrive + x-speakeasy-param-suppress-computed-diff: true + SourceMicrosoftSharepointCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-microsoft-sharepoint" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_MicrosoftSharepoint + x-speakeasy-param-suppress-computed-diff: true + SourceMicrosoftTeamsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-microsoft-teams" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_MicrosoftTeams + x-speakeasy-param-suppress-computed-diff: true + SourceMixpanelCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mixpanel" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mixpanel + x-speakeasy-param-suppress-computed-diff: true + SourceMondayCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-monday" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Monday + x-speakeasy-param-suppress-computed-diff: true + SourceMongodbV2CreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mongodb-v2" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_MongodbV2 + x-speakeasy-param-suppress-computed-diff: true + SourceMssqlCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mssql" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mssql + x-speakeasy-param-suppress-computed-diff: true + SourceMyHoursCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-my-hours" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_MyHours + x-speakeasy-param-suppress-computed-diff: true + SourceMysqlCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-mysql" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Mysql + x-speakeasy-param-suppress-computed-diff: true + SourceNetsuiteCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-netsuite" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Netsuite + x-speakeasy-param-suppress-computed-diff: true + SourceNorthpassLmsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-northpass-lms" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_NorthpassLms + x-speakeasy-param-suppress-computed-diff: true + SourceNotionCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-notion" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Notion + x-speakeasy-param-suppress-computed-diff: true + SourceNylasCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-nylas" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Nylas + x-speakeasy-param-suppress-computed-diff: true + SourceNytimesCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-nytimes" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Nytimes + x-speakeasy-param-suppress-computed-diff: true + SourceOktaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-okta" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Okta + x-speakeasy-param-suppress-computed-diff: true + SourceOmnisendCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-omnisend" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Omnisend + x-speakeasy-param-suppress-computed-diff: true + SourceOnesignalCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-onesignal" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Onesignal + x-speakeasy-param-suppress-computed-diff: true + SourceOracleCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-oracle" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Oracle + x-speakeasy-param-suppress-computed-diff: true + SourceOrbCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-orb" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Orb + x-speakeasy-param-suppress-computed-diff: true + SourceOrbitCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-orbit" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Orbit + x-speakeasy-param-suppress-computed-diff: true + SourceOutbrainAmplifyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-outbrain-amplify" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_OutbrainAmplify + x-speakeasy-param-suppress-computed-diff: true + SourceOutreachCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-outreach" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Outreach + x-speakeasy-param-suppress-computed-diff: true + SourcePaypalTransactionCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-paypal-transaction" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_PaypalTransaction + x-speakeasy-param-suppress-computed-diff: true + SourcePaystackCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-paystack" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Paystack + x-speakeasy-param-suppress-computed-diff: true + SourcePendoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pendo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pendo + x-speakeasy-param-suppress-computed-diff: true + SourcePennylaneCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pennylane" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pennylane + x-speakeasy-param-suppress-computed-diff: true + SourcePersistiqCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-persistiq" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Persistiq + x-speakeasy-param-suppress-computed-diff: true + SourcePexelsApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pexels-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_PexelsApi + x-speakeasy-param-suppress-computed-diff: true + SourcePicqerCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-picqer" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Picqer + x-speakeasy-param-suppress-computed-diff: true + SourcePinterestCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pinterest" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pinterest + x-speakeasy-param-suppress-computed-diff: true + SourcePipedriveCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pipedrive" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pipedrive + x-speakeasy-param-suppress-computed-diff: true + SourcePiwikCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-piwik" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Piwik + x-speakeasy-param-suppress-computed-diff: true + SourcePlanhatCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-planhat" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Planhat + x-speakeasy-param-suppress-computed-diff: true + SourcePocketCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pocket" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pocket + x-speakeasy-param-suppress-computed-diff: true + SourcePokeapiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pokeapi" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Pokeapi + x-speakeasy-param-suppress-computed-diff: true + SourcePolygonStockApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-polygon-stock-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_PolygonStockApi + x-speakeasy-param-suppress-computed-diff: true + SourcePostgresCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-postgres" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Postgres + x-speakeasy-param-suppress-computed-diff: true + SourcePosthogCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-posthog" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Posthog + x-speakeasy-param-suppress-computed-diff: true + SourcePostmarkappCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-postmarkapp" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Postmarkapp + x-speakeasy-param-suppress-computed-diff: true + SourcePrestashopCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-prestashop" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Prestashop + x-speakeasy-param-suppress-computed-diff: true + SourceProductboardCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-productboard" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Productboard + x-speakeasy-param-suppress-computed-diff: true + SourceProductiveCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-productive" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Productive + x-speakeasy-param-suppress-computed-diff: true + SourcePypiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-pypi" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Pypi + x-speakeasy-param-suppress-computed-diff: true + SourceQualarooCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-qualaroo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Qualaroo + x-speakeasy-param-suppress-computed-diff: true + SourceRailzCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-railz" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Railz + x-speakeasy-param-suppress-computed-diff: true + SourceRechargeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-recharge" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Recharge + x-speakeasy-param-suppress-computed-diff: true + SourceRecreationCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-recreation" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Recreation + x-speakeasy-param-suppress-computed-diff: true + SourceRecruiteeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-recruitee" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Recruitee + x-speakeasy-param-suppress-computed-diff: true + SourceRecurlyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-recurly" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Recurly + x-speakeasy-param-suppress-computed-diff: true + SourceRedditCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-reddit" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Reddit + x-speakeasy-param-suppress-computed-diff: true + SourceRedshiftCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-redshift" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Redshift + x-speakeasy-param-suppress-computed-diff: true + SourceRetentlyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-retently" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Retently + x-speakeasy-param-suppress-computed-diff: true + SourceRkiCovidCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-rki-covid" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_RkiCovid + x-speakeasy-param-suppress-computed-diff: true + SourceRollbarCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-rollbar" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Rollbar + x-speakeasy-param-suppress-computed-diff: true + SourceRssCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-rss" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Rss + x-speakeasy-param-suppress-computed-diff: true + SourceS3CreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-s3" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_S3 + x-speakeasy-param-suppress-computed-diff: true + SourceSalesforceCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-salesforce" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Salesforce + x-speakeasy-param-suppress-computed-diff: true + SourceSalesloftCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-salesloft" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Salesloft + x-speakeasy-param-suppress-computed-diff: true + SourceSapFieldglassCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sap-fieldglass" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_SapFieldglass + x-speakeasy-param-suppress-computed-diff: true + SourceSavvycalCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-savvycal" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Savvycal + x-speakeasy-param-suppress-computed-diff: true + SourceScryfallCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-scryfall" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Scryfall + x-speakeasy-param-suppress-computed-diff: true + SourceSecodaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-secoda" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Secoda + x-speakeasy-param-suppress-computed-diff: true + SourceSendgridCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sendgrid" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Sendgrid + x-speakeasy-param-suppress-computed-diff: true + SourceSendinblueCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sendinblue" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Sendinblue + x-speakeasy-param-suppress-computed-diff: true + SourceSenseforceCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-senseforce" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Senseforce + x-speakeasy-param-suppress-computed-diff: true + SourceSentryCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sentry" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Sentry + x-speakeasy-param-suppress-computed-diff: true + SourceSftpCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sftp" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Sftp + x-speakeasy-param-suppress-computed-diff: true + SourceSftpBulkCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sftp-bulk" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_SftpBulk + x-speakeasy-param-suppress-computed-diff: true + SourceShopifyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-shopify" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Shopify + x-speakeasy-param-suppress-computed-diff: true + SourceShortcutCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-shortcut" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Shortcut + x-speakeasy-param-suppress-computed-diff: true + SourceShortioCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-shortio" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Shortio + x-speakeasy-param-suppress-computed-diff: true + SourceSlackCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-slack" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Slack + x-speakeasy-param-suppress-computed-diff: true + SourceSmailyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-smaily" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Smaily + x-speakeasy-param-suppress-computed-diff: true + SourceSmartengageCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-smartengage" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Smartengage + x-speakeasy-param-suppress-computed-diff: true + SourceSmartsheetsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-smartsheets" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Smartsheets + x-speakeasy-param-suppress-computed-diff: true + SourceSnapchatMarketingCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-snapchat-marketing" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_SnapchatMarketing + x-speakeasy-param-suppress-computed-diff: true + SourceSnowflakeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. 
dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-snowflake" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Snowflake + x-speakeasy-param-suppress-computed-diff: true + SourceSonarCloudCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-sonar-cloud" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_SonarCloud + x-speakeasy-param-suppress-computed-diff: true + SourceSpacexApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-spacex-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_SpacexApi + x-speakeasy-param-suppress-computed-diff: true + SourceSplitIoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-split-io" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_SplitIo + x-speakeasy-param-suppress-computed-diff: true + SourceSquareCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-square" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Square + x-speakeasy-param-suppress-computed-diff: true + SourceStravaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-strava" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Strava + x-speakeasy-param-suppress-computed-diff: true + SourceStripeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-stripe" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Stripe + x-speakeasy-param-suppress-computed-diff: true + SourceSurveySparrowCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-survey-sparrow" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_SurveySparrow + x-speakeasy-param-suppress-computed-diff: true + SourceSurveymonkeyCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. 
One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-surveymonkey" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Surveymonkey + x-speakeasy-param-suppress-computed-diff: true + SourceSurvicateCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-survicate" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Survicate + x-speakeasy-param-suppress-computed-diff: true + SourceTeamworkCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-teamwork" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Teamwork + x-speakeasy-param-suppress-computed-diff: true + SourceTempoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-tempo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Tempo + x-speakeasy-param-suppress-computed-diff: true + SourceTheGuardianApiCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-the-guardian-api" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_TheGuardianApi + x-speakeasy-param-suppress-computed-diff: true + SourceTiktokMarketingCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-tiktok-marketing" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_TiktokMarketing + x-speakeasy-param-suppress-computed-diff: true + SourceTrelloCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-trello" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Trello + x-speakeasy-param-suppress-computed-diff: true + SourceTrustpilotCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-trustpilot" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Trustpilot + x-speakeasy-param-suppress-computed-diff: true + SourceTvmazeScheduleCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-tvmaze-schedule" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_TvmazeSchedule + x-speakeasy-param-suppress-computed-diff: true + SourceTwilioCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-twilio" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Twilio + x-speakeasy-param-suppress-computed-diff: true + SourceTwilioTaskrouterCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-twilio-taskrouter" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_TwilioTaskrouter + x-speakeasy-param-suppress-computed-diff: true + SourceTwitterCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-twitter" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Twitter + x-speakeasy-param-suppress-computed-diff: true + SourceTypeformCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-typeform" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Typeform + x-speakeasy-param-suppress-computed-diff: true + SourceUsCensusCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-us-census" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_UsCensus + x-speakeasy-param-suppress-computed-diff: true + SourceVantageCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-vantage" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Vantage + x-speakeasy-param-suppress-computed-diff: true + SourceVwoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-vwo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Vwo + x-speakeasy-param-suppress-computed-diff: true + SourceWebflowCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-webflow" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Webflow + x-speakeasy-param-suppress-computed-diff: true + SourceWhenIWorkCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-when-i-work" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_WhenIWork + x-speakeasy-param-suppress-computed-diff: true + SourceWhiskyHunterCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-whisky-hunter" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_WhiskyHunter + x-speakeasy-param-suppress-computed-diff: true + SourceWikipediaPageviewsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-wikipedia-pageviews" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_WikipediaPageviews + x-speakeasy-param-suppress-computed-diff: true + SourceWoocommerceCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-woocommerce" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Woocommerce + x-speakeasy-param-suppress-computed-diff: true + SourceXkcdCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-xkcd" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Xkcd + x-speakeasy-param-suppress-computed-diff: true + SourceYandexMetricaCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-yandex-metrica" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_YandexMetrica + x-speakeasy-param-suppress-computed-diff: true + SourceYotpoCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-yotpo" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Yotpo + x-speakeasy-param-suppress-computed-diff: true + SourceYoutubeAnalyticsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-youtube-analytics" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_YoutubeAnalytics + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskChatCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zendesk-chat" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZendeskChat + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskSunshineCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zendesk-sunshine" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZendeskSunshine + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskSupportCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. 
+ type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zendesk-support" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZendeskSupport + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskTalkCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zendesk-talk" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZendeskTalk + x-speakeasy-param-suppress-computed-diff: true + SourceZenloopCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zenloop" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." 
+ type: "string" + x-speakeasy-entity: Source_Zenloop + x-speakeasy-param-suppress-computed-diff: true + SourceZohoCrmCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoho-crm" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_ZohoCrm + x-speakeasy-param-suppress-computed-diff: true + SourceZoomCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-zoom" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Zoom + x-speakeasy-param-suppress-computed-diff: true + SourceCustomCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the source e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.sourceType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/source-custom" + secretId: + description: + "Optional secretID obtained through the public API OAuth redirect\ + \ flow." + type: "string" + x-speakeasy-entity: Source_Custom + x-speakeasy-param-suppress-computed-diff: true + DestinationAstraCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-astra" + x-speakeasy-entity: Destination_Astra + x-speakeasy-param-suppress-computed-diff: true + DestinationAwsDatalakeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-aws-datalake" + x-speakeasy-entity: Destination_AwsDatalake + x-speakeasy-param-suppress-computed-diff: true + DestinationAzureBlobStorageCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-azure-blob-storage" + x-speakeasy-entity: Destination_AzureBlobStorage + x-speakeasy-param-suppress-computed-diff: true + DestinationBigqueryCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-bigquery" + x-speakeasy-entity: Destination_Bigquery + x-speakeasy-param-suppress-computed-diff: true + DestinationClickhouseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-clickhouse" + x-speakeasy-entity: Destination_Clickhouse + x-speakeasy-param-suppress-computed-diff: true + DestinationConvexCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-convex" + x-speakeasy-entity: Destination_Convex + x-speakeasy-param-suppress-computed-diff: true + DestinationDatabricksCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-databricks" + x-speakeasy-entity: Destination_Databricks + x-speakeasy-param-suppress-computed-diff: true + DestinationDevNullCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-dev-null" + x-speakeasy-entity: Destination_DevNull + x-speakeasy-param-suppress-computed-diff: true + DestinationDuckdbCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-duckdb" + x-speakeasy-entity: Destination_Duckdb + x-speakeasy-param-suppress-computed-diff: true + DestinationDynamodbCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-dynamodb" + x-speakeasy-entity: Destination_Dynamodb + x-speakeasy-param-suppress-computed-diff: true + DestinationElasticsearchCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-elasticsearch" + x-speakeasy-entity: Destination_Elasticsearch + x-speakeasy-param-suppress-computed-diff: true + DestinationFireboltCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-firebolt" + x-speakeasy-entity: Destination_Firebolt + x-speakeasy-param-suppress-computed-diff: true + DestinationFirestoreCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-firestore" + x-speakeasy-entity: Destination_Firestore + x-speakeasy-param-suppress-computed-diff: true + DestinationGcsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-gcs" + x-speakeasy-entity: Destination_Gcs + x-speakeasy-param-suppress-computed-diff: true + DestinationGoogleSheetsCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-google-sheets" + x-speakeasy-entity: Destination_GoogleSheets + x-speakeasy-param-suppress-computed-diff: true + DestinationIcebergCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-iceberg" + x-speakeasy-entity: Destination_Iceberg + x-speakeasy-param-suppress-computed-diff: true + DestinationMilvusCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-milvus" + x-speakeasy-entity: Destination_Milvus + x-speakeasy-param-suppress-computed-diff: true + DestinationMongodbCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-mongodb" + x-speakeasy-entity: Destination_Mongodb + x-speakeasy-param-suppress-computed-diff: true + DestinationMssqlCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-mssql" + x-speakeasy-entity: Destination_Mssql + x-speakeasy-param-suppress-computed-diff: true + DestinationMysqlCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-mysql" + x-speakeasy-entity: Destination_Mysql + x-speakeasy-param-suppress-computed-diff: true + DestinationOracleCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-oracle" + x-speakeasy-entity: Destination_Oracle + x-speakeasy-param-suppress-computed-diff: true + DestinationPgvectorCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-pgvector" + x-speakeasy-entity: Destination_Pgvector + x-speakeasy-param-suppress-computed-diff: true + DestinationPineconeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-pinecone" + x-speakeasy-entity: Destination_Pinecone + x-speakeasy-param-suppress-computed-diff: true + DestinationPostgresCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-postgres" + x-speakeasy-entity: Destination_Postgres + x-speakeasy-param-suppress-computed-diff: true + DestinationPubsubCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-pubsub" + x-speakeasy-entity: Destination_Pubsub + x-speakeasy-param-suppress-computed-diff: true + DestinationQdrantCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-qdrant" + x-speakeasy-entity: Destination_Qdrant + x-speakeasy-param-suppress-computed-diff: true + DestinationRedisCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-redis" + x-speakeasy-entity: Destination_Redis + x-speakeasy-param-suppress-computed-diff: true + DestinationRedshiftCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-redshift" + x-speakeasy-entity: Destination_Redshift + x-speakeasy-param-suppress-computed-diff: true + DestinationS3CreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-s3" + x-speakeasy-entity: Destination_S3 + x-speakeasy-param-suppress-computed-diff: true + DestinationS3GlueCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-s3-glue" + x-speakeasy-entity: Destination_S3Glue + x-speakeasy-param-suppress-computed-diff: true + DestinationSftpJsonCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-sftp-json" + x-speakeasy-entity: Destination_SftpJson + x-speakeasy-param-suppress-computed-diff: true + DestinationSnowflakeCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-snowflake" + x-speakeasy-entity: Destination_Snowflake + x-speakeasy-param-suppress-computed-diff: true + DestinationSnowflakeCortexCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-snowflake-cortex" + x-speakeasy-entity: Destination_SnowflakeCortex + x-speakeasy-param-suppress-computed-diff: true + DestinationTeradataCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-teradata" + x-speakeasy-entity: Destination_Teradata + x-speakeasy-param-suppress-computed-diff: true + DestinationTimeplusCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-timeplus" + x-speakeasy-entity: Destination_Timeplus + x-speakeasy-param-suppress-computed-diff: true + DestinationTypesenseCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-typesense" + x-speakeasy-entity: Destination_Typesense + x-speakeasy-param-suppress-computed-diff: true + DestinationVectaraCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-vectara" + x-speakeasy-entity: Destination_Vectara + x-speakeasy-param-suppress-computed-diff: true + DestinationWeaviateCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-weaviate" + x-speakeasy-entity: Destination_Weaviate + x-speakeasy-param-suppress-computed-diff: true + DestinationYellowbrickCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. 
+ format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-yellowbrick" + x-speakeasy-entity: Destination_Yellowbrick + x-speakeasy-param-suppress-computed-diff: true + DestinationCustomCreateRequest: + required: + - name + - workspaceId + - configuration + type: object + properties: + name: + description: Name of the destination e.g. dev-mysql-instance. + type: string + definitionId: + description: The UUID of the connector definition. One of configuration.destinationType or definitionId must be provided. + format: uuid + type: string + workspaceId: + format: uuid + type: string + configuration: + $ref: "#/components/schemas/destination-custom" + x-speakeasy-entity: Destination_Custom + x-speakeasy-param-suppress-computed-diff: true + Source7shiftsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-7shifts-update" + x-speakeasy-entity: Source_7shifts + x-speakeasy-param-suppress-computed-diff: true + SourceAhaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-aha-update" + x-speakeasy-entity: Source_Aha + x-speakeasy-param-suppress-computed-diff: true + SourceAirbytePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-airbyte-update" + x-speakeasy-entity: Source_Airbyte + x-speakeasy-param-suppress-computed-diff: true + SourceAircallPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + 
workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-aircall-update" + x-speakeasy-entity: Source_Aircall + x-speakeasy-param-suppress-computed-diff: true + SourceAirtablePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-airtable-update" + x-speakeasy-entity: Source_Airtable + x-speakeasy-param-suppress-computed-diff: true + SourceAlgoliaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-algolia-update" + x-speakeasy-entity: Source_Algolia + x-speakeasy-param-suppress-computed-diff: true + SourceAmazonAdsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-amazon-ads-update" + x-speakeasy-entity: Source_AmazonAds + x-speakeasy-param-suppress-computed-diff: true + SourceAmazonSellerPartnerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-amazon-seller-partner-update" + x-speakeasy-entity: Source_AmazonSellerPartner + x-speakeasy-param-suppress-computed-diff: true + SourceAmazonSqsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-amazon-sqs-update" + x-speakeasy-entity: Source_AmazonSqs + x-speakeasy-param-suppress-computed-diff: 
true + SourceAmplitudePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-amplitude-update" + x-speakeasy-entity: Source_Amplitude + x-speakeasy-param-suppress-computed-diff: true + SourceApifyDatasetPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-apify-dataset-update" + x-speakeasy-entity: Source_ApifyDataset + x-speakeasy-param-suppress-computed-diff: true + SourceAppcuesPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-appcues-update" + x-speakeasy-entity: Source_Appcues + x-speakeasy-param-suppress-computed-diff: true + SourceAppfiguresPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-appfigures-update" + x-speakeasy-entity: Source_Appfigures + x-speakeasy-param-suppress-computed-diff: true + SourceAppfollowPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-appfollow-update" + x-speakeasy-entity: Source_Appfollow + x-speakeasy-param-suppress-computed-diff: true + SourceAsanaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + 
$ref: "#/components/schemas/source-asana-update" + x-speakeasy-entity: Source_Asana + x-speakeasy-param-suppress-computed-diff: true + SourceAuth0PutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-auth0-update" + x-speakeasy-entity: Source_Auth0 + x-speakeasy-param-suppress-computed-diff: true + SourceAwsCloudtrailPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-aws-cloudtrail-update" + x-speakeasy-entity: Source_AwsCloudtrail + x-speakeasy-param-suppress-computed-diff: true + SourceAzureBlobStoragePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-azure-blob-storage-update" + x-speakeasy-entity: Source_AzureBlobStorage + x-speakeasy-param-suppress-computed-diff: true + SourceAzureTablePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-azure-table-update" + x-speakeasy-entity: Source_AzureTable + x-speakeasy-param-suppress-computed-diff: true + SourceBambooHrPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-bamboo-hr-update" + x-speakeasy-entity: Source_BambooHr + x-speakeasy-param-suppress-computed-diff: true + SourceBasecampPutRequest: + required: + - "name" + - 
"workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-basecamp-update" + x-speakeasy-entity: Source_Basecamp + x-speakeasy-param-suppress-computed-diff: true + SourceBeamerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-beamer-update" + x-speakeasy-entity: Source_Beamer + x-speakeasy-param-suppress-computed-diff: true + SourceBigqueryPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-bigquery-update" + x-speakeasy-entity: Source_Bigquery + x-speakeasy-param-suppress-computed-diff: true + SourceBingAdsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-bing-ads-update" + x-speakeasy-entity: Source_BingAds + x-speakeasy-param-suppress-computed-diff: true + SourceBitlyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-bitly-update" + x-speakeasy-entity: Source_Bitly + x-speakeasy-param-suppress-computed-diff: true + SourceBraintreePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-braintree-update" + x-speakeasy-entity: Source_Braintree + 
x-speakeasy-param-suppress-computed-diff: true + SourceBrazePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-braze-update" + x-speakeasy-entity: Source_Braze + x-speakeasy-param-suppress-computed-diff: true + SourceBreezyHrPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-breezy-hr-update" + x-speakeasy-entity: Source_BreezyHr + x-speakeasy-param-suppress-computed-diff: true + SourceBrevoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-brevo-update" + x-speakeasy-entity: Source_Brevo + x-speakeasy-param-suppress-computed-diff: true + SourceBuildkitePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-buildkite-update" + x-speakeasy-entity: Source_Buildkite + x-speakeasy-param-suppress-computed-diff: true + SourceBuzzsproutPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-buzzsprout-update" + x-speakeasy-entity: Source_Buzzsprout + x-speakeasy-param-suppress-computed-diff: true + SourceCalendlyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + 
configuration: + $ref: "#/components/schemas/source-calendly-update" + x-speakeasy-entity: Source_Calendly + x-speakeasy-param-suppress-computed-diff: true + SourceCannyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-canny-update" + x-speakeasy-entity: Source_Canny + x-speakeasy-param-suppress-computed-diff: true + SourceCartPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-cart-update" + x-speakeasy-entity: Source_Cart + x-speakeasy-param-suppress-computed-diff: true + SourceChameleonPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-chameleon-update" + x-speakeasy-entity: Source_Chameleon + x-speakeasy-param-suppress-computed-diff: true + SourceChargebeePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-chargebee-update" + x-speakeasy-entity: Source_Chargebee + x-speakeasy-param-suppress-computed-diff: true + SourceChartmogulPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-chartmogul-update" + x-speakeasy-entity: Source_Chartmogul + x-speakeasy-param-suppress-computed-diff: true + SourceCimisPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + 
type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-cimis-update" + x-speakeasy-entity: Source_Cimis + x-speakeasy-param-suppress-computed-diff: true + SourceClazarPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-clazar-update" + x-speakeasy-entity: Source_Clazar + x-speakeasy-param-suppress-computed-diff: true + SourceClickhousePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-clickhouse-update" + x-speakeasy-entity: Source_Clickhouse + x-speakeasy-param-suppress-computed-diff: true + SourceClickupApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-clickup-api-update" + x-speakeasy-entity: Source_ClickupApi + x-speakeasy-param-suppress-computed-diff: true + SourceClockifyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-clockify-update" + x-speakeasy-entity: Source_Clockify + x-speakeasy-param-suppress-computed-diff: true + SourceCloseComPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-close-com-update" + x-speakeasy-entity: Source_CloseCom + 
x-speakeasy-param-suppress-computed-diff: true + SourceCodaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-coda-update" + x-speakeasy-entity: Source_Coda + x-speakeasy-param-suppress-computed-diff: true + SourceCoinApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-coin-api-update" + x-speakeasy-entity: Source_CoinApi + x-speakeasy-param-suppress-computed-diff: true + SourceCoinmarketcapPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-coinmarketcap-update" + x-speakeasy-entity: Source_Coinmarketcap + x-speakeasy-param-suppress-computed-diff: true + SourceConfigcatPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-configcat-update" + x-speakeasy-entity: Source_Configcat + x-speakeasy-param-suppress-computed-diff: true + SourceConfluencePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-confluence-update" + x-speakeasy-entity: Source_Confluence + x-speakeasy-param-suppress-computed-diff: true + SourceConvexPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: 
"string" + configuration: + $ref: "#/components/schemas/source-convex-update" + x-speakeasy-entity: Source_Convex + x-speakeasy-param-suppress-computed-diff: true + SourceCustomerIoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-customer-io-update" + x-speakeasy-entity: Source_CustomerIo + x-speakeasy-param-suppress-computed-diff: true + SourceDatadogPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-datadog-update" + x-speakeasy-entity: Source_Datadog + x-speakeasy-param-suppress-computed-diff: true + SourceDatascopePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-datascope-update" + x-speakeasy-entity: Source_Datascope + x-speakeasy-param-suppress-computed-diff: true + SourceDbtPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-dbt-update" + x-speakeasy-entity: Source_Dbt + x-speakeasy-param-suppress-computed-diff: true + SourceDelightedPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-delighted-update" + x-speakeasy-entity: Source_Delighted + x-speakeasy-param-suppress-computed-diff: true + SourceDixaPutRequest: + required: + - "name" + - "workspaceId" + - 
"configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-dixa-update" + x-speakeasy-entity: Source_Dixa + x-speakeasy-param-suppress-computed-diff: true + SourceDockerhubPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-dockerhub-update" + x-speakeasy-entity: Source_Dockerhub + x-speakeasy-param-suppress-computed-diff: true + SourceDremioPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-dremio-update" + x-speakeasy-entity: Source_Dremio + x-speakeasy-param-suppress-computed-diff: true + SourceDropboxSignPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-dropbox-sign-update" + x-speakeasy-entity: Source_DropboxSign + x-speakeasy-param-suppress-computed-diff: true + SourceDynamodbPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-dynamodb-update" + x-speakeasy-entity: Source_Dynamodb + x-speakeasy-param-suppress-computed-diff: true + SourceEmailoctopusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-emailoctopus-update" + x-speakeasy-entity: 
Source_Emailoctopus + x-speakeasy-param-suppress-computed-diff: true + SourceEventbritePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-eventbrite-update" + x-speakeasy-entity: Source_Eventbrite + x-speakeasy-param-suppress-computed-diff: true + SourceExchangeRatesPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-exchange-rates-update" + x-speakeasy-entity: Source_ExchangeRates + x-speakeasy-param-suppress-computed-diff: true + SourceEzofficeinventoryPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-ezofficeinventory-update" + x-speakeasy-entity: Source_Ezofficeinventory + x-speakeasy-param-suppress-computed-diff: true + SourceFacebookMarketingPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-facebook-marketing-update" + x-speakeasy-entity: Source_FacebookMarketing + x-speakeasy-param-suppress-computed-diff: true + SourceFakerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-faker-update" + x-speakeasy-entity: Source_Faker + x-speakeasy-param-suppress-computed-diff: true + SourceFaunaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" 
+ properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-fauna-update" + x-speakeasy-entity: Source_Fauna + x-speakeasy-param-suppress-computed-diff: true + SourceFilePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-file-update" + x-speakeasy-entity: Source_File + x-speakeasy-param-suppress-computed-diff: true + SourceFireboltPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-firebolt-update" + x-speakeasy-entity: Source_Firebolt + x-speakeasy-param-suppress-computed-diff: true + SourceFleetioPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-fleetio-update" + x-speakeasy-entity: Source_Fleetio + x-speakeasy-param-suppress-computed-diff: true + SourceFreshcallerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-freshcaller-update" + x-speakeasy-entity: Source_Freshcaller + x-speakeasy-param-suppress-computed-diff: true + SourceFreshchatPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-freshchat-update" + x-speakeasy-entity: Source_Freshchat + x-speakeasy-param-suppress-computed-diff: true + 
SourceFreshdeskPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-freshdesk-update" + x-speakeasy-entity: Source_Freshdesk + x-speakeasy-param-suppress-computed-diff: true + SourceFreshsalesPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-freshsales-update" + x-speakeasy-entity: Source_Freshsales + x-speakeasy-param-suppress-computed-diff: true + SourceFrontPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-front-update" + x-speakeasy-entity: Source_Front + x-speakeasy-param-suppress-computed-diff: true + SourceGainsightPxPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-gainsight-px-update" + x-speakeasy-entity: Source_GainsightPx + x-speakeasy-param-suppress-computed-diff: true + SourceGcsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-gcs-update" + x-speakeasy-entity: Source_Gcs + x-speakeasy-param-suppress-computed-diff: true + SourceGetlagoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-getlago-update" + x-speakeasy-entity: Source_Getlago + x-speakeasy-param-suppress-computed-diff: true + SourceGithubPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-github-update" + x-speakeasy-entity: Source_Github + x-speakeasy-param-suppress-computed-diff: true + SourceGitlabPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-gitlab-update" + x-speakeasy-entity: Source_Gitlab + x-speakeasy-param-suppress-computed-diff: true + SourceGlassfrogPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-glassfrog-update" + x-speakeasy-entity: Source_Glassfrog + x-speakeasy-param-suppress-computed-diff: true + SourceGnewsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-gnews-update" + x-speakeasy-entity: Source_Gnews + x-speakeasy-param-suppress-computed-diff: true + SourceGoldcastPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-goldcast-update" + x-speakeasy-entity: Source_Goldcast + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleAdsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + 
name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-ads-update" + x-speakeasy-entity: Source_GoogleAds + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleAnalyticsDataApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-analytics-data-api-update" + x-speakeasy-entity: Source_GoogleAnalyticsDataApi + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleDirectoryPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-directory-update" + x-speakeasy-entity: Source_GoogleDirectory + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleDrivePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-drive-update" + x-speakeasy-entity: Source_GoogleDrive + x-speakeasy-param-suppress-computed-diff: true + SourceGooglePagespeedInsightsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-pagespeed-insights-update" + x-speakeasy-entity: Source_GooglePagespeedInsights + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleSearchConsolePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-google-search-console-update" + x-speakeasy-entity: Source_GoogleSearchConsole + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleSheetsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-sheets-update" + x-speakeasy-entity: Source_GoogleSheets + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleTasksPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-tasks-update" + x-speakeasy-entity: Source_GoogleTasks + x-speakeasy-param-suppress-computed-diff: true + SourceGoogleWebfontsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-google-webfonts-update" + x-speakeasy-entity: Source_GoogleWebfonts + x-speakeasy-param-suppress-computed-diff: true + SourceGreenhousePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-greenhouse-update" + x-speakeasy-entity: Source_Greenhouse + x-speakeasy-param-suppress-computed-diff: true + SourceGridlyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-gridly-update" + x-speakeasy-entity: Source_Gridly + x-speakeasy-param-suppress-computed-diff: true + SourceGuruPutRequest: + required: + - 
"name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-guru-update" + x-speakeasy-entity: Source_Guru + x-speakeasy-param-suppress-computed-diff: true + SourceHardcodedRecordsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-hardcoded-records-update" + x-speakeasy-entity: Source_HardcodedRecords + x-speakeasy-param-suppress-computed-diff: true + SourceHarvestPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-harvest-update" + x-speakeasy-entity: Source_Harvest + x-speakeasy-param-suppress-computed-diff: true + SourceHeightPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-height-update" + x-speakeasy-entity: Source_Height + x-speakeasy-param-suppress-computed-diff: true + SourceHibobPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-hibob-update" + x-speakeasy-entity: Source_Hibob + x-speakeasy-param-suppress-computed-diff: true + SourceHighLevelPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-high-level-update" + 
x-speakeasy-entity: Source_HighLevel + x-speakeasy-param-suppress-computed-diff: true + SourceHubplannerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-hubplanner-update" + x-speakeasy-entity: Source_Hubplanner + x-speakeasy-param-suppress-computed-diff: true + SourceHubspotPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-hubspot-update" + x-speakeasy-entity: Source_Hubspot + x-speakeasy-param-suppress-computed-diff: true + SourceInsightlyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-insightly-update" + x-speakeasy-entity: Source_Insightly + x-speakeasy-param-suppress-computed-diff: true + SourceInstagramPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-instagram-update" + x-speakeasy-entity: Source_Instagram + x-speakeasy-param-suppress-computed-diff: true + SourceInstatusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-instatus-update" + x-speakeasy-entity: Source_Instatus + x-speakeasy-param-suppress-computed-diff: true + SourceIntercomPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" 
+ workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-intercom-update" + x-speakeasy-entity: Source_Intercom + x-speakeasy-param-suppress-computed-diff: true + SourceIp2whoisPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-ip2whois-update" + x-speakeasy-entity: Source_Ip2whois + x-speakeasy-param-suppress-computed-diff: true + SourceIterablePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-iterable-update" + x-speakeasy-entity: Source_Iterable + x-speakeasy-param-suppress-computed-diff: true + SourceJiraPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-jira-update" + x-speakeasy-entity: Source_Jira + x-speakeasy-param-suppress-computed-diff: true + SourceJotformPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-jotform-update" + x-speakeasy-entity: Source_Jotform + x-speakeasy-param-suppress-computed-diff: true + SourceK6CloudPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-k6-cloud-update" + x-speakeasy-entity: Source_K6Cloud + x-speakeasy-param-suppress-computed-diff: true + SourceKissmetricsPutRequest: + required: + - 
"name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-kissmetrics-update" + x-speakeasy-entity: Source_Kissmetrics + x-speakeasy-param-suppress-computed-diff: true + SourceKlarnaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-klarna-update" + x-speakeasy-entity: Source_Klarna + x-speakeasy-param-suppress-computed-diff: true + SourceKlaviyoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-klaviyo-update" + x-speakeasy-entity: Source_Klaviyo + x-speakeasy-param-suppress-computed-diff: true + SourceKyvePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-kyve-update" + x-speakeasy-entity: Source_Kyve + x-speakeasy-param-suppress-computed-diff: true + SourceLaunchdarklyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-launchdarkly-update" + x-speakeasy-entity: Source_Launchdarkly + x-speakeasy-param-suppress-computed-diff: true + SourceLeadfeederPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-leadfeeder-update" + 
x-speakeasy-entity: Source_Leadfeeder + x-speakeasy-param-suppress-computed-diff: true + SourceLemlistPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-lemlist-update" + x-speakeasy-entity: Source_Lemlist + x-speakeasy-param-suppress-computed-diff: true + SourceLeverHiringPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-lever-hiring-update" + x-speakeasy-entity: Source_LeverHiring + x-speakeasy-param-suppress-computed-diff: true + SourceLinkedinAdsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-linkedin-ads-update" + x-speakeasy-entity: Source_LinkedinAds + x-speakeasy-param-suppress-computed-diff: true + SourceLinkedinPagesPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-linkedin-pages-update" + x-speakeasy-entity: Source_LinkedinPages + x-speakeasy-param-suppress-computed-diff: true + SourceLinnworksPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-linnworks-update" + x-speakeasy-entity: Source_Linnworks + x-speakeasy-param-suppress-computed-diff: true + SourceLobPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + 
name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-lob-update" + x-speakeasy-entity: Source_Lob + x-speakeasy-param-suppress-computed-diff: true + SourceLokalisePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-lokalise-update" + x-speakeasy-entity: Source_Lokalise + x-speakeasy-param-suppress-computed-diff: true + SourceLookerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-looker-update" + x-speakeasy-entity: Source_Looker + x-speakeasy-param-suppress-computed-diff: true + SourceLumaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-luma-update" + x-speakeasy-entity: Source_Luma + x-speakeasy-param-suppress-computed-diff: true + SourceMailchimpPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mailchimp-update" + x-speakeasy-entity: Source_Mailchimp + x-speakeasy-param-suppress-computed-diff: true + SourceMailgunPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mailgun-update" + x-speakeasy-entity: Source_Mailgun + x-speakeasy-param-suppress-computed-diff: true + SourceMailjetSmsPutRequest: + 
required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mailjet-sms-update" + x-speakeasy-entity: Source_MailjetSms + x-speakeasy-param-suppress-computed-diff: true + SourceMarketoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-marketo-update" + x-speakeasy-entity: Source_Marketo + x-speakeasy-param-suppress-computed-diff: true + SourceMetabasePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-metabase-update" + x-speakeasy-entity: Source_Metabase + x-speakeasy-param-suppress-computed-diff: true + SourceMicrosoftOnedrivePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-microsoft-onedrive-update" + x-speakeasy-entity: Source_MicrosoftOnedrive + x-speakeasy-param-suppress-computed-diff: true + SourceMicrosoftSharepointPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-microsoft-sharepoint-update" + x-speakeasy-entity: Source_MicrosoftSharepoint + x-speakeasy-param-suppress-computed-diff: true + SourceMicrosoftTeamsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + 
configuration: + $ref: "#/components/schemas/source-microsoft-teams-update" + x-speakeasy-entity: Source_MicrosoftTeams + x-speakeasy-param-suppress-computed-diff: true + SourceMixpanelPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mixpanel-update" + x-speakeasy-entity: Source_Mixpanel + x-speakeasy-param-suppress-computed-diff: true + SourceMondayPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-monday-update" + x-speakeasy-entity: Source_Monday + x-speakeasy-param-suppress-computed-diff: true + SourceMongodbV2PutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mongodb-v2-update" + x-speakeasy-entity: Source_MongodbV2 + x-speakeasy-param-suppress-computed-diff: true + SourceMssqlPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mssql-update" + x-speakeasy-entity: Source_Mssql + x-speakeasy-param-suppress-computed-diff: true + SourceMyHoursPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-my-hours-update" + x-speakeasy-entity: Source_MyHours + x-speakeasy-param-suppress-computed-diff: true + SourceMysqlPutRequest: + required: + - "name" + - "workspaceId" + - 
"configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-mysql-update" + x-speakeasy-entity: Source_Mysql + x-speakeasy-param-suppress-computed-diff: true + SourceNetsuitePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-netsuite-update" + x-speakeasy-entity: Source_Netsuite + x-speakeasy-param-suppress-computed-diff: true + SourceNorthpassLmsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-northpass-lms-update" + x-speakeasy-entity: Source_NorthpassLms + x-speakeasy-param-suppress-computed-diff: true + SourceNotionPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-notion-update" + x-speakeasy-entity: Source_Notion + x-speakeasy-param-suppress-computed-diff: true + SourceNylasPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-nylas-update" + x-speakeasy-entity: Source_Nylas + x-speakeasy-param-suppress-computed-diff: true + SourceNytimesPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-nytimes-update" + x-speakeasy-entity: Source_Nytimes + 
x-speakeasy-param-suppress-computed-diff: true + SourceOktaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-okta-update" + x-speakeasy-entity: Source_Okta + x-speakeasy-param-suppress-computed-diff: true + SourceOmnisendPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-omnisend-update" + x-speakeasy-entity: Source_Omnisend + x-speakeasy-param-suppress-computed-diff: true + SourceOnesignalPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-onesignal-update" + x-speakeasy-entity: Source_Onesignal + x-speakeasy-param-suppress-computed-diff: true + SourceOraclePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-oracle-update" + x-speakeasy-entity: Source_Oracle + x-speakeasy-param-suppress-computed-diff: true + SourceOrbPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-orb-update" + x-speakeasy-entity: Source_Orb + x-speakeasy-param-suppress-computed-diff: true + SourceOrbitPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-orbit-update" + x-speakeasy-entity: Source_Orbit + x-speakeasy-param-suppress-computed-diff: true + SourceOutbrainAmplifyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-outbrain-amplify-update" + x-speakeasy-entity: Source_OutbrainAmplify + x-speakeasy-param-suppress-computed-diff: true + SourceOutreachPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-outreach-update" + x-speakeasy-entity: Source_Outreach + x-speakeasy-param-suppress-computed-diff: true + SourcePaypalTransactionPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-paypal-transaction-update" + x-speakeasy-entity: Source_PaypalTransaction + x-speakeasy-param-suppress-computed-diff: true + SourcePaystackPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-paystack-update" + x-speakeasy-entity: Source_Paystack + x-speakeasy-param-suppress-computed-diff: true + SourcePendoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pendo-update" + x-speakeasy-entity: Source_Pendo + x-speakeasy-param-suppress-computed-diff: true + SourcePennylanePutRequest: + required: + - "name" + - "workspaceId" + 
- "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pennylane-update" + x-speakeasy-entity: Source_Pennylane + x-speakeasy-param-suppress-computed-diff: true + SourcePersistiqPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-persistiq-update" + x-speakeasy-entity: Source_Persistiq + x-speakeasy-param-suppress-computed-diff: true + SourcePexelsApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pexels-api-update" + x-speakeasy-entity: Source_PexelsApi + x-speakeasy-param-suppress-computed-diff: true + SourcePicqerPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-picqer-update" + x-speakeasy-entity: Source_Picqer + x-speakeasy-param-suppress-computed-diff: true + SourcePinterestPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pinterest-update" + x-speakeasy-entity: Source_Pinterest + x-speakeasy-param-suppress-computed-diff: true + SourcePipedrivePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pipedrive-update" + x-speakeasy-entity: 
Source_Pipedrive + x-speakeasy-param-suppress-computed-diff: true + SourcePiwikPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-piwik-update" + x-speakeasy-entity: Source_Piwik + x-speakeasy-param-suppress-computed-diff: true + SourcePlanhatPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-planhat-update" + x-speakeasy-entity: Source_Planhat + x-speakeasy-param-suppress-computed-diff: true + SourcePocketPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pocket-update" + x-speakeasy-entity: Source_Pocket + x-speakeasy-param-suppress-computed-diff: true + SourcePokeapiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pokeapi-update" + x-speakeasy-entity: Source_Pokeapi + x-speakeasy-param-suppress-computed-diff: true + SourcePolygonStockApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-polygon-stock-api-update" + x-speakeasy-entity: Source_PolygonStockApi + x-speakeasy-param-suppress-computed-diff: true + SourcePostgresPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: 
"uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-postgres-update" + x-speakeasy-entity: Source_Postgres + x-speakeasy-param-suppress-computed-diff: true + SourcePosthogPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-posthog-update" + x-speakeasy-entity: Source_Posthog + x-speakeasy-param-suppress-computed-diff: true + SourcePostmarkappPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-postmarkapp-update" + x-speakeasy-entity: Source_Postmarkapp + x-speakeasy-param-suppress-computed-diff: true + SourcePrestashopPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-prestashop-update" + x-speakeasy-entity: Source_Prestashop + x-speakeasy-param-suppress-computed-diff: true + SourceProductboardPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-productboard-update" + x-speakeasy-entity: Source_Productboard + x-speakeasy-param-suppress-computed-diff: true + SourceProductivePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-productive-update" + x-speakeasy-entity: Source_Productive + x-speakeasy-param-suppress-computed-diff: true + SourcePypiPutRequest: + 
required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-pypi-update" + x-speakeasy-entity: Source_Pypi + x-speakeasy-param-suppress-computed-diff: true + SourceQualarooPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-qualaroo-update" + x-speakeasy-entity: Source_Qualaroo + x-speakeasy-param-suppress-computed-diff: true + SourceRailzPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-railz-update" + x-speakeasy-entity: Source_Railz + x-speakeasy-param-suppress-computed-diff: true + SourceRechargePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-recharge-update" + x-speakeasy-entity: Source_Recharge + x-speakeasy-param-suppress-computed-diff: true + SourceRecreationPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-recreation-update" + x-speakeasy-entity: Source_Recreation + x-speakeasy-param-suppress-computed-diff: true + SourceRecruiteePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-recruitee-update" + 
x-speakeasy-entity: Source_Recruitee + x-speakeasy-param-suppress-computed-diff: true + SourceRecurlyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-recurly-update" + x-speakeasy-entity: Source_Recurly + x-speakeasy-param-suppress-computed-diff: true + SourceRedditPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-reddit-update" + x-speakeasy-entity: Source_Reddit + x-speakeasy-param-suppress-computed-diff: true + SourceRedshiftPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-redshift-update" + x-speakeasy-entity: Source_Redshift + x-speakeasy-param-suppress-computed-diff: true + SourceRetentlyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-retently-update" + x-speakeasy-entity: Source_Retently + x-speakeasy-param-suppress-computed-diff: true + SourceRkiCovidPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-rki-covid-update" + x-speakeasy-entity: Source_RkiCovid + x-speakeasy-param-suppress-computed-diff: true + SourceRollbarPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + 
format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-rollbar-update" + x-speakeasy-entity: Source_Rollbar + x-speakeasy-param-suppress-computed-diff: true + SourceRssPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-rss-update" + x-speakeasy-entity: Source_Rss + x-speakeasy-param-suppress-computed-diff: true + SourceS3PutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-s3-update" + x-speakeasy-entity: Source_S3 + x-speakeasy-param-suppress-computed-diff: true + SourceSalesforcePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-salesforce-update" + x-speakeasy-entity: Source_Salesforce + x-speakeasy-param-suppress-computed-diff: true + SourceSalesloftPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-salesloft-update" + x-speakeasy-entity: Source_Salesloft + x-speakeasy-param-suppress-computed-diff: true + SourceSapFieldglassPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sap-fieldglass-update" + x-speakeasy-entity: Source_SapFieldglass + x-speakeasy-param-suppress-computed-diff: true + SourceSavvycalPutRequest: + required: + - "name" + - 
"workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-savvycal-update" + x-speakeasy-entity: Source_Savvycal + x-speakeasy-param-suppress-computed-diff: true + SourceScryfallPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-scryfall-update" + x-speakeasy-entity: Source_Scryfall + x-speakeasy-param-suppress-computed-diff: true + SourceSecodaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-secoda-update" + x-speakeasy-entity: Source_Secoda + x-speakeasy-param-suppress-computed-diff: true + SourceSendgridPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sendgrid-update" + x-speakeasy-entity: Source_Sendgrid + x-speakeasy-param-suppress-computed-diff: true + SourceSendinbluePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sendinblue-update" + x-speakeasy-entity: Source_Sendinblue + x-speakeasy-param-suppress-computed-diff: true + SourceSenseforcePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-senseforce-update" + x-speakeasy-entity: 
Source_Senseforce + x-speakeasy-param-suppress-computed-diff: true + SourceSentryPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sentry-update" + x-speakeasy-entity: Source_Sentry + x-speakeasy-param-suppress-computed-diff: true + SourceSftpPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sftp-update" + x-speakeasy-entity: Source_Sftp + x-speakeasy-param-suppress-computed-diff: true + SourceSftpBulkPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sftp-bulk-update" + x-speakeasy-entity: Source_SftpBulk + x-speakeasy-param-suppress-computed-diff: true + SourceShopifyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-shopify-update" + x-speakeasy-entity: Source_Shopify + x-speakeasy-param-suppress-computed-diff: true + SourceShortcutPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-shortcut-update" + x-speakeasy-entity: Source_Shortcut + x-speakeasy-param-suppress-computed-diff: true + SourceShortioPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" 
+ configuration: + $ref: "#/components/schemas/source-shortio-update" + x-speakeasy-entity: Source_Shortio + x-speakeasy-param-suppress-computed-diff: true + SourceSlackPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-slack-update" + x-speakeasy-entity: Source_Slack + x-speakeasy-param-suppress-computed-diff: true + SourceSmailyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-smaily-update" + x-speakeasy-entity: Source_Smaily + x-speakeasy-param-suppress-computed-diff: true + SourceSmartengagePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-smartengage-update" + x-speakeasy-entity: Source_Smartengage + x-speakeasy-param-suppress-computed-diff: true + SourceSmartsheetsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-smartsheets-update" + x-speakeasy-entity: Source_Smartsheets + x-speakeasy-param-suppress-computed-diff: true + SourceSnapchatMarketingPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-snapchat-marketing-update" + x-speakeasy-entity: Source_SnapchatMarketing + x-speakeasy-param-suppress-computed-diff: true + SourceSnowflakePutRequest: + required: + - "name" 
+ - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-snowflake-update" + x-speakeasy-entity: Source_Snowflake + x-speakeasy-param-suppress-computed-diff: true + SourceSonarCloudPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-sonar-cloud-update" + x-speakeasy-entity: Source_SonarCloud + x-speakeasy-param-suppress-computed-diff: true + SourceSpacexApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-spacex-api-update" + x-speakeasy-entity: Source_SpacexApi + x-speakeasy-param-suppress-computed-diff: true + SourceSplitIoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-split-io-update" + x-speakeasy-entity: Source_SplitIo + x-speakeasy-param-suppress-computed-diff: true + SourceSquarePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-square-update" + x-speakeasy-entity: Source_Square + x-speakeasy-param-suppress-computed-diff: true + SourceStravaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-strava-update" + x-speakeasy-entity: 
Source_Strava + x-speakeasy-param-suppress-computed-diff: true + SourceStripePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-stripe-update" + x-speakeasy-entity: Source_Stripe + x-speakeasy-param-suppress-computed-diff: true + SourceSurveySparrowPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-survey-sparrow-update" + x-speakeasy-entity: Source_SurveySparrow + x-speakeasy-param-suppress-computed-diff: true + SourceSurveymonkeyPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-surveymonkey-update" + x-speakeasy-entity: Source_Surveymonkey + x-speakeasy-param-suppress-computed-diff: true + SourceSurvicatePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-survicate-update" + x-speakeasy-entity: Source_Survicate + x-speakeasy-param-suppress-computed-diff: true + SourceTeamworkPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-teamwork-update" + x-speakeasy-entity: Source_Teamwork + x-speakeasy-param-suppress-computed-diff: true + SourceTempoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + 
workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-tempo-update" + x-speakeasy-entity: Source_Tempo + x-speakeasy-param-suppress-computed-diff: true + SourceTheGuardianApiPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-the-guardian-api-update" + x-speakeasy-entity: Source_TheGuardianApi + x-speakeasy-param-suppress-computed-diff: true + SourceTiktokMarketingPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-tiktok-marketing-update" + x-speakeasy-entity: Source_TiktokMarketing + x-speakeasy-param-suppress-computed-diff: true + SourceTrelloPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-trello-update" + x-speakeasy-entity: Source_Trello + x-speakeasy-param-suppress-computed-diff: true + SourceTrustpilotPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-trustpilot-update" + x-speakeasy-entity: Source_Trustpilot + x-speakeasy-param-suppress-computed-diff: true + SourceTvmazeSchedulePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-tvmaze-schedule-update" + x-speakeasy-entity: Source_TvmazeSchedule + 
x-speakeasy-param-suppress-computed-diff: true + SourceTwilioPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-twilio-update" + x-speakeasy-entity: Source_Twilio + x-speakeasy-param-suppress-computed-diff: true + SourceTwilioTaskrouterPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-twilio-taskrouter-update" + x-speakeasy-entity: Source_TwilioTaskrouter + x-speakeasy-param-suppress-computed-diff: true + SourceTwitterPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-twitter-update" + x-speakeasy-entity: Source_Twitter + x-speakeasy-param-suppress-computed-diff: true + SourceTypeformPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-typeform-update" + x-speakeasy-entity: Source_Typeform + x-speakeasy-param-suppress-computed-diff: true + SourceUsCensusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-us-census-update" + x-speakeasy-entity: Source_UsCensus + x-speakeasy-param-suppress-computed-diff: true + SourceVantagePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" 
+ type: "string" + configuration: + $ref: "#/components/schemas/source-vantage-update" + x-speakeasy-entity: Source_Vantage + x-speakeasy-param-suppress-computed-diff: true + SourceVwoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-vwo-update" + x-speakeasy-entity: Source_Vwo + x-speakeasy-param-suppress-computed-diff: true + SourceWebflowPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-webflow-update" + x-speakeasy-entity: Source_Webflow + x-speakeasy-param-suppress-computed-diff: true + SourceWhenIWorkPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-when-i-work-update" + x-speakeasy-entity: Source_WhenIWork + x-speakeasy-param-suppress-computed-diff: true + SourceWhiskyHunterPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-whisky-hunter-update" + x-speakeasy-entity: Source_WhiskyHunter + x-speakeasy-param-suppress-computed-diff: true + SourceWikipediaPageviewsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-wikipedia-pageviews-update" + x-speakeasy-entity: Source_WikipediaPageviews + x-speakeasy-param-suppress-computed-diff: true + SourceWoocommercePutRequest: + 
required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-woocommerce-update" + x-speakeasy-entity: Source_Woocommerce + x-speakeasy-param-suppress-computed-diff: true + SourceXkcdPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-xkcd-update" + x-speakeasy-entity: Source_Xkcd + x-speakeasy-param-suppress-computed-diff: true + SourceYandexMetricaPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-yandex-metrica-update" + x-speakeasy-entity: Source_YandexMetrica + x-speakeasy-param-suppress-computed-diff: true + SourceYotpoPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-yotpo-update" + x-speakeasy-entity: Source_Yotpo + x-speakeasy-param-suppress-computed-diff: true + SourceYoutubeAnalyticsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-youtube-analytics-update" + x-speakeasy-entity: Source_YoutubeAnalytics + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskChatPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/source-zendesk-chat-update" + x-speakeasy-entity: Source_ZendeskChat + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskSunshinePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zendesk-sunshine-update" + x-speakeasy-entity: Source_ZendeskSunshine + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskSupportPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zendesk-support-update" + x-speakeasy-entity: Source_ZendeskSupport + x-speakeasy-param-suppress-computed-diff: true + SourceZendeskTalkPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zendesk-talk-update" + x-speakeasy-entity: Source_ZendeskTalk + x-speakeasy-param-suppress-computed-diff: true + SourceZenloopPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zenloop-update" + x-speakeasy-entity: Source_Zenloop + x-speakeasy-param-suppress-computed-diff: true + SourceZohoCrmPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoho-crm-update" + x-speakeasy-entity: Source_ZohoCrm + x-speakeasy-param-suppress-computed-diff: true + SourceZoomPutRequest: + required: + - "name" + - 
"workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-zoom-update" + x-speakeasy-entity: Source_Zoom + x-speakeasy-param-suppress-computed-diff: true + SourceCustomPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/source-custom-update" + x-speakeasy-entity: Source_Custom + x-speakeasy-param-suppress-computed-diff: true + DestinationAstraPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-astra-update" + x-speakeasy-entity: Destination_Astra + x-speakeasy-param-suppress-computed-diff: true + DestinationAwsDatalakePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-aws-datalake-update" + x-speakeasy-entity: Destination_AwsDatalake + x-speakeasy-param-suppress-computed-diff: true + DestinationAzureBlobStoragePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-azure-blob-storage-update" + x-speakeasy-entity: Destination_AzureBlobStorage + x-speakeasy-param-suppress-computed-diff: true + DestinationBigqueryPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: 
"#/components/schemas/destination-bigquery-update" + x-speakeasy-entity: Destination_Bigquery + x-speakeasy-param-suppress-computed-diff: true + DestinationClickhousePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-clickhouse-update" + x-speakeasy-entity: Destination_Clickhouse + x-speakeasy-param-suppress-computed-diff: true + DestinationConvexPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-convex-update" + x-speakeasy-entity: Destination_Convex + x-speakeasy-param-suppress-computed-diff: true + DestinationDatabricksPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-databricks-update" + x-speakeasy-entity: Destination_Databricks + x-speakeasy-param-suppress-computed-diff: true + DestinationDevNullPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-dev-null-update" + x-speakeasy-entity: Destination_DevNull + x-speakeasy-param-suppress-computed-diff: true + DestinationDuckdbPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-duckdb-update" + x-speakeasy-entity: Destination_Duckdb + x-speakeasy-param-suppress-computed-diff: true + 
DestinationDynamodbPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-dynamodb-update" + x-speakeasy-entity: Destination_Dynamodb + x-speakeasy-param-suppress-computed-diff: true + DestinationElasticsearchPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-elasticsearch-update" + x-speakeasy-entity: Destination_Elasticsearch + x-speakeasy-param-suppress-computed-diff: true + DestinationFireboltPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-firebolt-update" + x-speakeasy-entity: Destination_Firebolt + x-speakeasy-param-suppress-computed-diff: true + DestinationFirestorePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-firestore-update" + x-speakeasy-entity: Destination_Firestore + x-speakeasy-param-suppress-computed-diff: true + DestinationGcsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-gcs-update" + x-speakeasy-entity: Destination_Gcs + x-speakeasy-param-suppress-computed-diff: true + DestinationGoogleSheetsPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + 
workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-google-sheets-update" + x-speakeasy-entity: Destination_GoogleSheets + x-speakeasy-param-suppress-computed-diff: true + DestinationIcebergPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-iceberg-update" + x-speakeasy-entity: Destination_Iceberg + x-speakeasy-param-suppress-computed-diff: true + DestinationMilvusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-milvus-update" + x-speakeasy-entity: Destination_Milvus + x-speakeasy-param-suppress-computed-diff: true + DestinationMongodbPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-mongodb-update" + x-speakeasy-entity: Destination_Mongodb + x-speakeasy-param-suppress-computed-diff: true + DestinationMssqlPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-mssql-update" + x-speakeasy-entity: Destination_Mssql + x-speakeasy-param-suppress-computed-diff: true + DestinationMysqlPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-mysql-update" + x-speakeasy-entity: Destination_Mysql + 
x-speakeasy-param-suppress-computed-diff: true + DestinationOraclePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-oracle-update" + x-speakeasy-entity: Destination_Oracle + x-speakeasy-param-suppress-computed-diff: true + DestinationPgvectorPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-pgvector-update" + x-speakeasy-entity: Destination_Pgvector + x-speakeasy-param-suppress-computed-diff: true + DestinationPineconePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-pinecone-update" + x-speakeasy-entity: Destination_Pinecone + x-speakeasy-param-suppress-computed-diff: true + DestinationPostgresPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-postgres-update" + x-speakeasy-entity: Destination_Postgres + x-speakeasy-param-suppress-computed-diff: true + DestinationPubsubPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-pubsub-update" + x-speakeasy-entity: Destination_Pubsub + x-speakeasy-param-suppress-computed-diff: true + DestinationQdrantPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + 
name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-qdrant-update" + x-speakeasy-entity: Destination_Qdrant + x-speakeasy-param-suppress-computed-diff: true + DestinationRedisPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-redis-update" + x-speakeasy-entity: Destination_Redis + x-speakeasy-param-suppress-computed-diff: true + DestinationRedshiftPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-redshift-update" + x-speakeasy-entity: Destination_Redshift + x-speakeasy-param-suppress-computed-diff: true + DestinationS3PutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-s3-update" + x-speakeasy-entity: Destination_S3 + x-speakeasy-param-suppress-computed-diff: true + DestinationS3GluePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-s3-glue-update" + x-speakeasy-entity: Destination_S3Glue + x-speakeasy-param-suppress-computed-diff: true + DestinationSftpJsonPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-sftp-json-update" + x-speakeasy-entity: Destination_SftpJson + 
x-speakeasy-param-suppress-computed-diff: true + DestinationSnowflakePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-snowflake-update" + x-speakeasy-entity: Destination_Snowflake + x-speakeasy-param-suppress-computed-diff: true + DestinationSnowflakeCortexPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-snowflake-cortex-update" + x-speakeasy-entity: Destination_SnowflakeCortex + x-speakeasy-param-suppress-computed-diff: true + DestinationTeradataPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-teradata-update" + x-speakeasy-entity: Destination_Teradata + x-speakeasy-param-suppress-computed-diff: true + DestinationTimeplusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-timeplus-update" + x-speakeasy-entity: Destination_Timeplus + x-speakeasy-param-suppress-computed-diff: true + DestinationTypesensePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-typesense-update" + x-speakeasy-entity: Destination_Typesense + x-speakeasy-param-suppress-computed-diff: true + DestinationVectaraPutRequest: + required: + - "name" + - "workspaceId" + - 
"configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-vectara-update" + x-speakeasy-entity: Destination_Vectara + x-speakeasy-param-suppress-computed-diff: true + DestinationWeaviatePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-weaviate-update" + x-speakeasy-entity: Destination_Weaviate + x-speakeasy-param-suppress-computed-diff: true + DestinationYellowbrickPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-yellowbrick-update" + x-speakeasy-entity: Destination_Yellowbrick + x-speakeasy-param-suppress-computed-diff: true + DestinationCustomPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-custom-update" + x-speakeasy-entity: Destination_Custom + x-speakeasy-param-suppress-computed-diff: true + source-custom: + description: The values required to configure the source. + example: { user: "charles" } + destination-custom: + description: The values required to configure the destination. + example: { user: "charles" } + source-custom-update: + title: "Custom Spec" + destination-custom-update: + title: "Custom Spec" + SourceConfiguration: + description: The values required to configure the source. + example: { user: "charles" } + DestinationConfiguration: + description: The values required to configure the destination. 
+ example: { user: "charles" } + securitySchemes: + bearerAuth: + type: http + scheme: bearer + bearerFormat: JWT + basicAuth: + type: http + scheme: basic + clientCredentials: + type: oauth2 + flows: + clientCredentials: + tokenUrl: /applications/token + scopes: {} +security: + - bearerAuth: [] + - basicAuth: [] + - clientCredentials: [] From a87406eef968d7a554758a0156cadac931ce6129 Mon Sep 17 00:00:00 2001 From: Octavia Squidington III <90398440+octavia-squidington-iii@users.noreply.github.com> Date: Tue, 1 Oct 2024 21:43:51 +0100 Subject: [PATCH 34/36] chore: update CDK version following release (#14194) Co-authored-by: lazebnyi <53845333+lazebnyi@users.noreply.github.com> --- airbyte-connector-builder-resources/CDK_VERSION | 2 +- airbyte-connector-builder-server/Dockerfile | 2 +- airbyte-connector-builder-server/requirements.in | 2 +- airbyte-connector-builder-server/requirements.txt | 10 +++++----- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/airbyte-connector-builder-resources/CDK_VERSION b/airbyte-connector-builder-resources/CDK_VERSION index 6ab7204dabc..1802e771b4f 100644 --- a/airbyte-connector-builder-resources/CDK_VERSION +++ b/airbyte-connector-builder-resources/CDK_VERSION @@ -1 +1 @@ -5.7.5 +5.8.1 diff --git a/airbyte-connector-builder-server/Dockerfile b/airbyte-connector-builder-server/Dockerfile index 18f7aceeacb..085cb9417ba 100644 --- a/airbyte-connector-builder-server/Dockerfile +++ b/airbyte-connector-builder-server/Dockerfile @@ -2,7 +2,7 @@ ARG JAVA_PYTHON_BASE_IMAGE_VERSION=2.1.7 FROM airbyte/airbyte-base-java-python-image:${JAVA_PYTHON_BASE_IMAGE_VERSION} AS connector-builder-server # Set up CDK requirements -ARG CDK_VERSION=5.7.5 +ARG CDK_VERSION=5.8.1 ENV CDK_PYTHON=${PYENV_ROOT}/versions/${PYTHON_VERSION}/bin/python ENV CDK_ENTRYPOINT ${PYENV_ROOT}/versions/${PYTHON_VERSION}/lib/python3.10/site-packages/airbyte_cdk/connector_builder/main.py # Set up CDK diff --git a/airbyte-connector-builder-server/requirements.in 
b/airbyte-connector-builder-server/requirements.in index c52a6bbb432..efdbd9bb1f9 100644 --- a/airbyte-connector-builder-server/requirements.in +++ b/airbyte-connector-builder-server/requirements.in @@ -1 +1 @@ -airbyte-cdk==5.7.5 +airbyte-cdk==5.8.1 diff --git a/airbyte-connector-builder-server/requirements.txt b/airbyte-connector-builder-server/requirements.txt index 146dfa5da90..792b1f07dc6 100644 --- a/airbyte-connector-builder-server/requirements.txt +++ b/airbyte-connector-builder-server/requirements.txt @@ -4,7 +4,7 @@ # # pip-compile # -airbyte-cdk==5.7.5 +airbyte-cdk==5.8.1 # via -r requirements.in airbyte-protocol-models-dataclasses==0.13.0 # via airbyte-cdk @@ -21,7 +21,7 @@ attrs==24.2.0 # requests-cache backoff==2.2.1 # via airbyte-cdk -bracex==2.5 +bracex==2.5.post1 # via wcmatch cachetools==5.5.0 # via airbyte-cdk @@ -52,7 +52,7 @@ genson==1.2.2 # via airbyte-cdk h11==0.14.0 # via httpcore -httpcore==1.0.5 +httpcore==1.0.6 # via httpx httpx==0.27.2 # via langsmith @@ -83,7 +83,7 @@ markupsafe==2.1.5 # via jinja2 nltk==3.8.1 # via airbyte-cdk -numpy==1.26.4 +numpy==2.1.1 # via pandas orjson==3.10.7 # via @@ -91,7 +91,7 @@ orjson==3.10.7 # langsmith packaging==23.2 # via langchain-core -pandas==2.2.0 +pandas==2.2.2 # via airbyte-cdk pendulum==2.1.2 # via airbyte-cdk From 39a89c5c3b2aaf30e210e4c0133494a49475a06b Mon Sep 17 00:00:00 2001 From: Benoit Moriceau Date: Tue, 1 Oct 2024 15:03:34 -0700 Subject: [PATCH 35/36] chore: add the file transfer feature flag (#14213) --- airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt index 7fa01f13a80..8eec3be0847 100644 --- a/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt +++ b/airbyte-featureflag/src/main/kotlin/FlagDefinitions.kt @@ -181,3 +181,5 @@ object NodeSelectorOverride : Temporary(key = "platform.node-selector-ov object 
UseAsyncReplicate : Temporary(key = "platform.use-async-replicate", default = false) object UseRouteToTaskRouting : Temporary(key = "platform.use-route-to-task-routing", default = true) + +object UseFileTransferMode : Temporary(key = "platform.use-file-transfer-mode", default = false) From 156f8f998b56ca9a2910f2c16229807e6bc8af82 Mon Sep 17 00:00:00 2001 From: Vladimir Date: Wed, 2 Oct 2024 01:35:59 +0300 Subject: [PATCH 36/36] feat: enable field selection on disabled stream (#14196) --- .../components/StreamFieldCell.tsx | 22 ++++---- .../components/StreamNameCell.tsx | 9 +++- .../SyncCatalog/streamConfigHelpers.test.ts | 53 +++++++++++++++++++ .../SyncCatalog/streamConfigHelpers.ts | 21 ++++++++ 4 files changed, 95 insertions(+), 10 deletions(-) diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamFieldCell.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamFieldCell.tsx index 51125d80a52..83f10165569 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamFieldCell.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamFieldCell.tsx @@ -22,7 +22,7 @@ import { isCursor as checkIsCursor, isPrimaryKey as checkIsPrimaryKey, } from "../../../syncCatalog/StreamFieldsTable/StreamFieldsTable"; -import { updateFieldSelected } from "../../../syncCatalog/SyncCatalog/streamConfigHelpers"; +import { getSelectedMandatoryFields, updateFieldSelected } from "../../../syncCatalog/SyncCatalog/streamConfigHelpers"; import { getFieldPathDisplayName } from "../../../syncCatalog/utils"; import { SyncStreamFieldWithId } from "../../formConfig"; import { SyncCatalogUIModel } from "../SyncCatalogTable"; @@ -64,13 +64,11 @@ export const StreamFieldNameCell: React.FC = ({ const isHashed = checkIsFieldHashed(field, config); const isDisabled = - !config?.selected || - mode === "readonly" || - 
(config.syncMode === SyncMode.incremental && (isCursor || isChildFieldCursor)) || - (config.destinationSyncMode === DestinationSyncMode.append_dedup && (isPrimaryKey || isChildFieldPrimaryKey)) || - (config.destinationSyncMode === DestinationSyncMode.overwrite_dedup && (isPrimaryKey || isChildFieldPrimaryKey)) || - isNestedField; - const showTooltip = isDisabled && mode !== "readonly" && config?.selected; + config?.selected && + ((config.syncMode === SyncMode.incremental && (isCursor || isChildFieldCursor)) || + (config.destinationSyncMode === DestinationSyncMode.append_dedup && (isPrimaryKey || isChildFieldPrimaryKey)) || + (config.destinationSyncMode === DestinationSyncMode.overwrite_dedup && (isPrimaryKey || isChildFieldPrimaryKey))); + const showTooltip = isDisabled && mode !== "readonly"; const isFieldSelected = checkIsFieldSelected(field, config); @@ -98,9 +96,15 @@ export const StreamFieldNameCell: React.FC = ({ numberOfFieldsInStream, }); + const mandatorySelectedFields = getSelectedMandatoryFields(config); + updateStreamField(row.original.streamNode, { ...updatedConfig, - selectedFields: !updatedConfig?.fieldSelectionEnabled ? [] : updatedConfig?.selectedFields, + // any field selection immediately enables the disabled stream + ...(isSelected && !config?.selected && { selected: true }), + selectedFields: !updatedConfig?.fieldSelectionEnabled + ? [] + : [...(updatedConfig?.selectedFields ?? 
[]), ...mandatorySelectedFields], // remove this field if it was part of hashedFields hashedFields: config.hashedFields?.filter((f) => !isEqual(f.fieldPath, fieldPath)), }); diff --git a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamNameCell.tsx b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamNameCell.tsx index dfeb836b8fb..ab0edbce72f 100644 --- a/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamNameCell.tsx +++ b/airbyte-webapp/src/components/connection/ConnectionForm/SyncCatalogTable/components/StreamNameCell.tsx @@ -50,7 +50,14 @@ export const StreamNameCell: React.FC = ({ updateStreamField(row.original.streamNode!, { selected: checked })} + onChange={({ target: { checked } }) => + updateStreamField(row.original.streamNode!, { + selected: checked, + // enable/disable stream will enable/disable all fields + fieldSelectionEnabled: false, + selectedFields: [], + }) + } data-testid="sync-stream-checkbox" />
  • + {flexRender(cell.column.columnDef.cell, cell.getContext())}