From 6c7f89ec9032c8da72c125592d2f6ca8aeed54ac Mon Sep 17 00:00:00 2001 From: terencecho Date: Mon, 11 Sep 2023 19:56:13 -0700 Subject: [PATCH] Update to latest openapi spec --- .gitattributes | 2 + README.md | 2 +- airbyte.yaml | 4327 +++++++++++------ docs/data-sources/destination_bigquery.md | 3 +- docs/data-sources/destination_milvus.md | 221 + docs/data-sources/destination_pinecone.md | 129 + docs/data-sources/destination_snowflake.md | 3 +- docs/data-sources/destination_typesense.md | 2 +- docs/data-sources/source_amazon_ads.md | 3 +- docs/data-sources/source_apify_dataset.md | 1 + docs/data-sources/source_auth0.md | 1 + docs/data-sources/source_datadog.md | 65 - docs/data-sources/source_google_ads.md | 4 +- .../source_google_search_console.md | 21 +- docs/data-sources/source_google_sheets.md | 1 - docs/data-sources/source_intercom.md | 2 + docs/data-sources/source_lemlist.md | 2 +- docs/data-sources/source_linkedin_ads.md | 30 +- docs/data-sources/source_mssql.md | 26 +- docs/data-sources/source_openweather.md | 53 - docs/data-sources/source_postgres.md | 50 +- docs/data-sources/source_posthog.md | 1 + docs/data-sources/source_public_apis.md | 46 - docs/data-sources/source_s3.md | 216 +- docs/data-sources/source_stripe.md | 2 +- docs/data-sources/source_zendesk_sunshine.md | 16 - docs/index.md | 2 +- docs/resources/destination_bigquery.md | 8 +- .../destination_bigquery_denormalized.md | 6 +- docs/resources/destination_clickhouse.md | 6 +- docs/resources/destination_convex.md | 4 +- docs/resources/destination_cumulio.md | 4 +- docs/resources/destination_databend.md | 6 +- docs/resources/destination_databricks.md | 6 +- docs/resources/destination_dev_null.md | 4 +- docs/resources/destination_dynamodb.md | 6 +- docs/resources/destination_elasticsearch.md | 6 +- docs/resources/destination_firebolt.md | 4 +- docs/resources/destination_firestore.md | 4 +- docs/resources/destination_gcs.md | 6 +- docs/resources/destination_google_sheets.md | 4 +- docs/resources/destination_keen.md | 6 +- docs/resources/destination_kinesis.md | 8 +- docs/resources/destination_langchain.md | 6 +- docs/resources/destination_milvus.md | 291 ++ docs/resources/destination_mongodb.md | 6 +- docs/resources/destination_mssql.md | 6 +- docs/resources/destination_mysql.md | 6 +- docs/resources/destination_oracle.md | 6 +- docs/resources/destination_pinecone.md | 170 + docs/resources/destination_postgres.md | 6 +- docs/resources/destination_pubsub.md | 14 +- docs/resources/destination_redis.md | 8 +- docs/resources/destination_redshift.md | 12 +- docs/resources/destination_s3.md | 8 +- docs/resources/destination_s3_glue.md | 8 +- docs/resources/destination_sftp_json.md | 6 +- docs/resources/destination_snowflake.md | 10 +- docs/resources/destination_timeplus.md | 4 +- docs/resources/destination_typesense.md | 8 +- docs/resources/destination_vertica.md | 6 +- docs/resources/destination_xata.md | 4 +- docs/resources/source_aha.md | 4 +- docs/resources/source_aircall.md | 4 +- docs/resources/source_airtable.md | 6 +- docs/resources/source_alloydb.md | 10 +- docs/resources/source_amazon_ads.md | 14 +- .../resources/source_amazon_seller_partner.md | 14 +- docs/resources/source_amazon_sqs.md | 8 +- docs/resources/source_amplitude.md | 6 +- docs/resources/source_apify_dataset.md | 8 +- docs/resources/source_appfollow.md | 4 +- docs/resources/source_asana.md | 4 +- docs/resources/source_auth0.md | 9 +- docs/resources/source_aws_cloudtrail.md | 4 +- docs/resources/source_azure_blob_storage.md | 4 +- 
docs/resources/source_azure_table.md | 6 +- docs/resources/source_bamboo_hr.md | 4 +- docs/resources/source_bigcommerce.md | 4 +- docs/resources/source_bigquery.md | 4 +- docs/resources/source_bing_ads.md | 8 +- docs/resources/source_braintree.md | 8 +- docs/resources/source_braze.md | 6 +- docs/resources/source_chargebee.md | 4 +- docs/resources/source_chartmogul.md | 6 +- docs/resources/source_clickhouse.md | 6 +- docs/resources/source_clickup_api.md | 6 +- docs/resources/source_clockify.md | 4 +- docs/resources/source_close_com.md | 4 +- docs/resources/source_coda.md | 4 +- docs/resources/source_coin_api.md | 6 +- docs/resources/source_coinmarketcap.md | 6 +- docs/resources/source_configcat.md | 6 +- docs/resources/source_confluence.md | 4 +- docs/resources/source_convex.md | 4 +- docs/resources/source_datadog.md | 87 - docs/resources/source_datascope.md | 4 +- docs/resources/source_delighted.md | 4 +- docs/resources/source_dixa.md | 6 +- docs/resources/source_dockerhub.md | 4 +- docs/resources/source_dremio.md | 4 +- docs/resources/source_dynamodb.md | 6 +- docs/resources/source_e2e_test_cloud.md | 8 +- docs/resources/source_emailoctopus.md | 4 +- docs/resources/source_exchange_rates.md | 6 +- docs/resources/source_facebook_marketing.md | 26 +- docs/resources/source_facebook_pages.md | 4 +- docs/resources/source_faker.md | 12 +- docs/resources/source_fauna.md | 8 +- docs/resources/source_file_secure.md | 8 +- docs/resources/source_firebolt.md | 4 +- docs/resources/source_freshcaller.md | 6 +- docs/resources/source_freshdesk.md | 6 +- docs/resources/source_freshsales.md | 4 +- docs/resources/source_gainsight_px.md | 4 +- docs/resources/source_gcs.md | 4 +- docs/resources/source_getlago.md | 4 +- docs/resources/source_github.md | 8 +- docs/resources/source_gitlab.md | 6 +- docs/resources/source_glassfrog.md | 4 +- docs/resources/source_gnews.md | 18 +- docs/resources/source_google_ads.md | 8 +- .../source_google_analytics_data_api.md | 8 +- docs/resources/source_google_analytics_v4.md | 4 +- docs/resources/source_google_directory.md | 6 +- .../source_google_pagespeed_insights.md | 4 +- .../resources/source_google_search_console.md | 39 +- docs/resources/source_google_sheets.md | 6 +- docs/resources/source_google_webfonts.md | 4 +- .../source_google_workspace_admin_reports.md | 8 +- docs/resources/source_greenhouse.md | 4 +- docs/resources/source_gridly.md | 4 +- docs/resources/source_harvest.md | 4 +- docs/resources/source_hubplanner.md | 4 +- docs/resources/source_hubspot.md | 4 +- docs/resources/source_insightly.md | 4 +- docs/resources/source_instagram.md | 4 +- docs/resources/source_instatus.md | 4 +- docs/resources/source_intercom.md | 17 +- docs/resources/source_ip2whois.md | 6 +- docs/resources/source_iterable.md | 4 +- docs/resources/source_jira.md | 10 +- docs/resources/source_k6_cloud.md | 4 +- docs/resources/source_klarna.md | 10 +- docs/resources/source_klaviyo.md | 4 +- docs/resources/source_kustomer_singer.md | 4 +- docs/resources/source_kyve.md | 14 +- docs/resources/source_launchdarkly.md | 4 +- docs/resources/source_lemlist.md | 6 +- docs/resources/source_lever_hiring.md | 4 +- docs/resources/source_linkedin_ads.md | 38 +- docs/resources/source_linkedin_pages.md | 4 +- docs/resources/source_linnworks.md | 6 +- docs/resources/source_lokalise.md | 4 +- docs/resources/source_mailchimp.md | 4 +- docs/resources/source_mailgun.md | 4 +- docs/resources/source_mailjet_sms.md | 4 +- docs/resources/source_marketo.md | 4 +- docs/resources/source_metabase.md | 6 +- 
docs/resources/source_microsoft_teams.md | 4 +- docs/resources/source_mixpanel.md | 14 +- docs/resources/source_monday.md | 4 +- docs/resources/source_mongodb.md | 4 +- docs/resources/source_mongodb_internal_poc.md | 4 +- docs/resources/source_mssql.md | 40 +- docs/resources/source_my_hours.md | 6 +- docs/resources/source_mysql.md | 6 +- docs/resources/source_netsuite.md | 6 +- docs/resources/source_notion.md | 4 +- docs/resources/source_nytimes.md | 6 +- docs/resources/source_okta.md | 4 +- docs/resources/source_omnisend.md | 4 +- docs/resources/source_onesignal.md | 4 +- docs/resources/source_openweather.md | 66 - docs/resources/source_oracle.md | 10 +- docs/resources/source_orb.md | 6 +- docs/resources/source_orbit.md | 4 +- docs/resources/source_outbrain_amplify.md | 8 +- docs/resources/source_outreach.md | 4 +- docs/resources/source_paypal_transaction.md | 4 +- docs/resources/source_paystack.md | 6 +- docs/resources/source_pendo.md | 4 +- docs/resources/source_persistiq.md | 4 +- docs/resources/source_pexels_api.md | 6 +- docs/resources/source_pinterest.md | 6 +- docs/resources/source_pipedrive.md | 4 +- docs/resources/source_pocket.md | 8 +- docs/resources/source_pokeapi.md | 4 +- docs/resources/source_polygon_stock_api.md | 12 +- docs/resources/source_postgres.md | 66 +- docs/resources/source_posthog.md | 14 +- docs/resources/source_postmarkapp.md | 4 +- docs/resources/source_prestashop.md | 4 +- docs/resources/source_public_apis.md | 51 - docs/resources/source_punk_api.md | 4 +- docs/resources/source_pypi.md | 4 +- docs/resources/source_qualaroo.md | 4 +- docs/resources/source_quickbooks.md | 6 +- docs/resources/source_railz.md | 4 +- docs/resources/source_recharge.md | 4 +- docs/resources/source_recreation.md | 4 +- docs/resources/source_recruitee.md | 4 +- docs/resources/source_recurly.md | 4 +- docs/resources/source_redshift.md | 6 +- docs/resources/source_retently.md | 4 +- docs/resources/source_rki_covid.md | 4 +- docs/resources/source_rss.md | 4 +- docs/resources/source_s3.md | 275 +- docs/resources/source_salesforce.md | 10 +- docs/resources/source_salesloft.md | 4 +- docs/resources/source_sap_fieldglass.md | 4 +- docs/resources/source_secoda.md | 4 +- docs/resources/source_sendgrid.md | 4 +- docs/resources/source_sendinblue.md | 4 +- docs/resources/source_senseforce.md | 6 +- docs/resources/source_sentry.md | 6 +- docs/resources/source_sftp.md | 6 +- docs/resources/source_sftp_bulk.md | 10 +- docs/resources/source_shopify.md | 8 +- docs/resources/source_shortio.md | 4 +- docs/resources/source_slack.md | 8 +- docs/resources/source_smaily.md | 4 +- docs/resources/source_smartengage.md | 4 +- docs/resources/source_smartsheets.md | 6 +- docs/resources/source_snapchat_marketing.md | 4 +- docs/resources/source_snowflake.md | 4 +- docs/resources/source_sonar_cloud.md | 4 +- docs/resources/source_spacex_api.md | 6 +- docs/resources/source_square.md | 8 +- docs/resources/source_strava.md | 4 +- docs/resources/source_stripe.md | 10 +- docs/resources/source_survey_sparrow.md | 4 +- docs/resources/source_surveymonkey.md | 4 +- docs/resources/source_tempo.md | 4 +- docs/resources/source_the_guardian_api.md | 8 +- docs/resources/source_tiktok_marketing.md | 12 +- docs/resources/source_todoist.md | 4 +- docs/resources/source_trello.md | 4 +- docs/resources/source_trustpilot.md | 4 +- docs/resources/source_tvmaze_schedule.md | 6 +- docs/resources/source_twilio.md | 4 +- docs/resources/source_twilio_taskrouter.md | 4 +- docs/resources/source_twitter.md | 8 +- 
docs/resources/source_typeform.md | 6 +- docs/resources/source_us_census.md | 8 +- docs/resources/source_vantage.md | 4 +- docs/resources/source_webflow.md | 4 +- docs/resources/source_whisky_hunter.md | 4 +- docs/resources/source_wikipedia_pageviews.md | 10 +- docs/resources/source_woocommerce.md | 4 +- docs/resources/source_xero.md | 4 +- docs/resources/source_xkcd.md | 4 +- docs/resources/source_yandex_metrica.md | 4 +- docs/resources/source_yotpo.md | 6 +- docs/resources/source_younium.md | 8 +- docs/resources/source_youtube_analytics.md | 4 +- docs/resources/source_zendesk_chat.md | 4 +- docs/resources/source_zendesk_sunshine.md | 22 +- docs/resources/source_zendesk_support.md | 10 +- docs/resources/source_zendesk_talk.md | 6 +- docs/resources/source_zenloop.md | 4 +- docs/resources/source_zoho_crm.md | 8 +- docs/resources/source_zoom.md | 4 +- docs/resources/source_zuora.md | 8 +- docs/resources/workspace.md | 2 +- .../airbyte_destination_milvus/data-source.tf | 3 + .../data-source.tf | 3 + .../airbyte_source_datadog/data-source.tf | 4 - .../airbyte_source_openweather/data-source.tf | 4 - .../airbyte_source_public_apis/data-source.tf | 4 - examples/provider/provider.tf | 2 +- .../airbyte_destination_bigquery/resource.tf | 5 +- .../resource.tf | 6 +- .../resource.tf | 6 +- .../airbyte_destination_convex/resource.tf | 4 +- .../airbyte_destination_cumulio/resource.tf | 4 +- .../airbyte_destination_databend/resource.tf | 6 +- .../resource.tf | 6 +- .../airbyte_destination_dev_null/resource.tf | 4 +- .../airbyte_destination_dynamodb/resource.tf | 6 +- .../resource.tf | 6 +- .../airbyte_destination_firebolt/resource.tf | 4 +- .../airbyte_destination_firestore/resource.tf | 4 +- .../airbyte_destination_gcs/resource.tf | 6 +- .../resource.tf | 4 +- .../airbyte_destination_keen/resource.tf | 6 +- .../airbyte_destination_kinesis/resource.tf | 8 +- .../airbyte_destination_langchain/resource.tf | 6 +- .../airbyte_destination_milvus/resource.tf | 36 + .../airbyte_destination_mongodb/resource.tf | 6 +- .../airbyte_destination_mssql/resource.tf | 6 +- .../airbyte_destination_mysql/resource.tf | 6 +- .../airbyte_destination_oracle/resource.tf | 6 +- .../airbyte_destination_pinecone/resource.tf | 28 + .../airbyte_destination_postgres/resource.tf | 6 +- .../airbyte_destination_pubsub/resource.tf | 14 +- .../airbyte_destination_redis/resource.tf | 8 +- .../airbyte_destination_redshift/resource.tf | 12 +- .../airbyte_destination_s3/resource.tf | 8 +- .../airbyte_destination_s3_glue/resource.tf | 8 +- .../airbyte_destination_sftp_json/resource.tf | 6 +- .../airbyte_destination_snowflake/resource.tf | 7 +- .../airbyte_destination_timeplus/resource.tf | 4 +- .../airbyte_destination_typesense/resource.tf | 6 +- .../airbyte_destination_vertica/resource.tf | 6 +- .../airbyte_destination_xata/resource.tf | 4 +- .../resources/airbyte_source_aha/resource.tf | 4 +- .../airbyte_source_aircall/resource.tf | 4 +- .../airbyte_source_airtable/resource.tf | 6 +- .../airbyte_source_alloydb/resource.tf | 10 +- .../airbyte_source_amazon_ads/resource.tf | 11 +- .../resource.tf | 14 +- .../airbyte_source_amazon_sqs/resource.tf | 8 +- .../airbyte_source_amplitude/resource.tf | 6 +- .../airbyte_source_apify_dataset/resource.tf | 5 +- .../airbyte_source_appfollow/resource.tf | 4 +- .../airbyte_source_asana/resource.tf | 4 +- .../airbyte_source_auth0/resource.tf | 5 +- .../airbyte_source_aws_cloudtrail/resource.tf | 4 +- .../resource.tf | 4 +- .../airbyte_source_azure_table/resource.tf | 6 +- 
.../airbyte_source_bamboo_hr/resource.tf | 4 +- .../airbyte_source_bigcommerce/resource.tf | 4 +- .../airbyte_source_bigquery/resource.tf | 4 +- .../airbyte_source_bing_ads/resource.tf | 8 +- .../airbyte_source_braintree/resource.tf | 8 +- .../airbyte_source_braze/resource.tf | 6 +- .../airbyte_source_chargebee/resource.tf | 4 +- .../airbyte_source_chartmogul/resource.tf | 6 +- .../airbyte_source_clickhouse/resource.tf | 6 +- .../airbyte_source_clickup_api/resource.tf | 6 +- .../airbyte_source_clockify/resource.tf | 4 +- .../airbyte_source_close_com/resource.tf | 4 +- .../resources/airbyte_source_coda/resource.tf | 4 +- .../airbyte_source_coin_api/resource.tf | 6 +- .../airbyte_source_coinmarketcap/resource.tf | 6 +- .../airbyte_source_configcat/resource.tf | 6 +- .../airbyte_source_confluence/resource.tf | 4 +- .../airbyte_source_convex/resource.tf | 4 +- .../airbyte_source_datadog/resource.tf | 22 - .../airbyte_source_datascope/resource.tf | 4 +- .../airbyte_source_delighted/resource.tf | 4 +- .../resources/airbyte_source_dixa/resource.tf | 6 +- .../airbyte_source_dockerhub/resource.tf | 4 +- .../airbyte_source_dremio/resource.tf | 4 +- .../airbyte_source_dynamodb/resource.tf | 6 +- .../airbyte_source_e2e_test_cloud/resource.tf | 8 +- .../airbyte_source_emailoctopus/resource.tf | 4 +- .../airbyte_source_exchange_rates/resource.tf | 6 +- .../resource.tf | 26 +- .../airbyte_source_facebook_pages/resource.tf | 4 +- .../airbyte_source_faker/resource.tf | 12 +- .../airbyte_source_fauna/resource.tf | 8 +- .../airbyte_source_file_secure/resource.tf | 8 +- .../airbyte_source_firebolt/resource.tf | 4 +- .../airbyte_source_freshcaller/resource.tf | 6 +- .../airbyte_source_freshdesk/resource.tf | 6 +- .../airbyte_source_freshsales/resource.tf | 4 +- .../airbyte_source_gainsight_px/resource.tf | 4 +- .../resources/airbyte_source_gcs/resource.tf | 4 +- .../airbyte_source_getlago/resource.tf | 4 +- .../airbyte_source_github/resource.tf | 8 +- .../airbyte_source_gitlab/resource.tf | 6 +- .../airbyte_source_glassfrog/resource.tf | 4 +- .../airbyte_source_gnews/resource.tf | 18 +- .../airbyte_source_google_ads/resource.tf | 4 +- .../resource.tf | 8 +- .../resource.tf | 4 +- .../resource.tf | 6 +- .../resource.tf | 4 +- .../resource.tf | 18 +- .../airbyte_source_google_sheets/resource.tf | 5 +- .../resource.tf | 4 +- .../resource.tf | 8 +- .../airbyte_source_greenhouse/resource.tf | 4 +- .../airbyte_source_gridly/resource.tf | 4 +- .../airbyte_source_harvest/resource.tf | 4 +- .../airbyte_source_hubplanner/resource.tf | 4 +- .../airbyte_source_hubspot/resource.tf | 4 +- .../airbyte_source_insightly/resource.tf | 4 +- .../airbyte_source_instagram/resource.tf | 4 +- .../airbyte_source_instatus/resource.tf | 4 +- .../airbyte_source_intercom/resource.tf | 12 +- .../airbyte_source_ip2whois/resource.tf | 6 +- .../airbyte_source_iterable/resource.tf | 4 +- .../resources/airbyte_source_jira/resource.tf | 10 +- .../airbyte_source_k6_cloud/resource.tf | 4 +- .../airbyte_source_klarna/resource.tf | 10 +- .../airbyte_source_klaviyo/resource.tf | 4 +- .../resource.tf | 4 +- .../resources/airbyte_source_kyve/resource.tf | 14 +- .../airbyte_source_launchdarkly/resource.tf | 4 +- .../airbyte_source_lemlist/resource.tf | 4 +- .../airbyte_source_lever_hiring/resource.tf | 4 +- .../airbyte_source_linkedin_ads/resource.tf | 8 +- .../airbyte_source_linkedin_pages/resource.tf | 4 +- .../airbyte_source_linnworks/resource.tf | 6 +- .../airbyte_source_lokalise/resource.tf | 4 +- .../airbyte_source_mailchimp/resource.tf | 4 +- 
.../airbyte_source_mailgun/resource.tf | 4 +- .../airbyte_source_mailjet_sms/resource.tf | 4 +- .../airbyte_source_marketo/resource.tf | 4 +- .../airbyte_source_metabase/resource.tf | 6 +- .../resource.tf | 4 +- .../airbyte_source_mixpanel/resource.tf | 14 +- .../airbyte_source_monday/resource.tf | 4 +- .../airbyte_source_mongodb/resource.tf | 4 +- .../resource.tf | 4 +- .../airbyte_source_mssql/resource.tf | 14 +- .../airbyte_source_my_hours/resource.tf | 6 +- .../airbyte_source_mysql/resource.tf | 6 +- .../airbyte_source_netsuite/resource.tf | 6 +- .../airbyte_source_notion/resource.tf | 4 +- .../airbyte_source_nytimes/resource.tf | 6 +- .../resources/airbyte_source_okta/resource.tf | 4 +- .../airbyte_source_omnisend/resource.tf | 4 +- .../airbyte_source_onesignal/resource.tf | 4 +- .../airbyte_source_openweather/resource.tf | 13 - .../airbyte_source_oracle/resource.tf | 10 +- .../resources/airbyte_source_orb/resource.tf | 6 +- .../airbyte_source_orbit/resource.tf | 4 +- .../resource.tf | 8 +- .../airbyte_source_outreach/resource.tf | 4 +- .../resource.tf | 4 +- .../airbyte_source_paystack/resource.tf | 6 +- .../airbyte_source_pendo/resource.tf | 4 +- .../airbyte_source_persistiq/resource.tf | 4 +- .../airbyte_source_pexels_api/resource.tf | 6 +- .../airbyte_source_pinterest/resource.tf | 6 +- .../airbyte_source_pipedrive/resource.tf | 4 +- .../airbyte_source_pocket/resource.tf | 8 +- .../airbyte_source_pokeapi/resource.tf | 4 +- .../resource.tf | 12 +- .../airbyte_source_postgres/resource.tf | 16 +- .../airbyte_source_posthog/resource.tf | 13 +- .../airbyte_source_postmarkapp/resource.tf | 4 +- .../airbyte_source_prestashop/resource.tf | 4 +- .../airbyte_source_public_apis/resource.tf | 8 - .../airbyte_source_punk_api/resource.tf | 4 +- .../resources/airbyte_source_pypi/resource.tf | 4 +- .../airbyte_source_qualaroo/resource.tf | 4 +- .../airbyte_source_quickbooks/resource.tf | 6 +- .../airbyte_source_railz/resource.tf | 4 +- .../airbyte_source_recharge/resource.tf | 4 +- .../airbyte_source_recreation/resource.tf | 4 +- .../airbyte_source_recruitee/resource.tf | 4 +- .../airbyte_source_recurly/resource.tf | 4 +- .../airbyte_source_redshift/resource.tf | 6 +- .../airbyte_source_retently/resource.tf | 4 +- .../airbyte_source_rki_covid/resource.tf | 4 +- .../resources/airbyte_source_rss/resource.tf | 4 +- .../resources/airbyte_source_s3/resource.tf | 34 +- .../airbyte_source_salesforce/resource.tf | 10 +- .../airbyte_source_salesloft/resource.tf | 4 +- .../airbyte_source_sap_fieldglass/resource.tf | 4 +- .../airbyte_source_secoda/resource.tf | 4 +- .../airbyte_source_sendgrid/resource.tf | 4 +- .../airbyte_source_sendinblue/resource.tf | 4 +- .../airbyte_source_senseforce/resource.tf | 6 +- .../airbyte_source_sentry/resource.tf | 6 +- .../resources/airbyte_source_sftp/resource.tf | 6 +- .../airbyte_source_sftp_bulk/resource.tf | 10 +- .../airbyte_source_shopify/resource.tf | 6 +- .../airbyte_source_shortio/resource.tf | 4 +- .../airbyte_source_slack/resource.tf | 8 +- .../airbyte_source_smaily/resource.tf | 4 +- .../airbyte_source_smartengage/resource.tf | 4 +- .../airbyte_source_smartsheets/resource.tf | 6 +- .../resource.tf | 4 +- .../airbyte_source_snowflake/resource.tf | 4 +- .../airbyte_source_sonar_cloud/resource.tf | 4 +- .../airbyte_source_spacex_api/resource.tf | 6 +- .../airbyte_source_square/resource.tf | 8 +- .../airbyte_source_strava/resource.tf | 4 +- .../airbyte_source_stripe/resource.tf | 6 +- .../airbyte_source_survey_sparrow/resource.tf | 4 +- 
.../airbyte_source_surveymonkey/resource.tf | 4 +- .../airbyte_source_tempo/resource.tf | 4 +- .../resource.tf | 8 +- .../resource.tf | 12 +- .../airbyte_source_todoist/resource.tf | 4 +- .../airbyte_source_trello/resource.tf | 4 +- .../airbyte_source_trustpilot/resource.tf | 4 +- .../resource.tf | 6 +- .../airbyte_source_twilio/resource.tf | 4 +- .../resource.tf | 4 +- .../airbyte_source_twitter/resource.tf | 8 +- .../airbyte_source_typeform/resource.tf | 6 +- .../airbyte_source_us_census/resource.tf | 8 +- .../airbyte_source_vantage/resource.tf | 4 +- .../airbyte_source_webflow/resource.tf | 4 +- .../airbyte_source_whisky_hunter/resource.tf | 4 +- .../resource.tf | 10 +- .../airbyte_source_woocommerce/resource.tf | 4 +- .../resources/airbyte_source_xero/resource.tf | 4 +- .../resources/airbyte_source_xkcd/resource.tf | 4 +- .../airbyte_source_yandex_metrica/resource.tf | 4 +- .../airbyte_source_yotpo/resource.tf | 6 +- .../airbyte_source_younium/resource.tf | 8 +- .../resource.tf | 4 +- .../airbyte_source_zendesk_chat/resource.tf | 4 +- .../resource.tf | 6 +- .../resource.tf | 8 +- .../airbyte_source_zendesk_talk/resource.tf | 6 +- .../airbyte_source_zenloop/resource.tf | 4 +- .../airbyte_source_zoho_crm/resource.tf | 8 +- .../resources/airbyte_source_zoom/resource.tf | 4 +- .../airbyte_source_zuora/resource.tf | 8 +- .../resources/airbyte_workspace/resource.tf | 2 +- files.gen | 134 +- gen.yaml | 4 +- .../destination_bigquery_data_source.go | 6 +- .../provider/destination_bigquery_resource.go | 6 +- .../destination_bigquery_resource_sdk.go | 14 - .../destination_milvus_data_source.go | 474 ++ ... => destination_milvus_data_source_sdk.go} | 4 +- .../provider/destination_milvus_resource.go | 640 +++ .../destination_milvus_resource_sdk.go | 431 ++ .../destination_pinecone_data_source.go | 296 ++ ...> destination_pinecone_data_source_sdk.go} | 4 +- .../provider/destination_pinecone_resource.go | 462 ++ .../destination_pinecone_resource_sdk.go | 233 + .../destination_snowflake_data_source.go | 6 +- .../destination_snowflake_resource.go | 6 +- .../destination_snowflake_resource_sdk.go | 14 - .../destination_typesense_data_source.go | 2 +- .../destination_typesense_resource.go | 2 +- .../destination_typesense_resource_sdk.go | 8 +- internal/provider/provider.go | 10 +- .../provider/source_amazonads_data_source.go | 7 +- .../provider/source_amazonads_resource.go | 7 +- .../provider/source_amazonads_resource_sdk.go | 10 + .../source_apifydataset_data_source.go | 4 + .../provider/source_apifydataset_resource.go | 6 +- .../source_apifydataset_resource_sdk.go | 18 +- internal/provider/source_auth0_data_source.go | 4 + internal/provider/source_auth0_resource.go | 4 + .../provider/source_auth0_resource_sdk.go | 14 + .../provider/source_datadog_data_source.go | 216 - internal/provider/source_datadog_resource.go | 382 -- .../provider/source_datadog_resource_sdk.go | 166 - .../provider/source_googleads_data_source.go | 4 +- .../provider/source_googleads_resource.go | 6 +- .../provider/source_googleads_resource_sdk.go | 14 +- .../source_googlesearchconsole_data_source.go | 27 +- .../source_googlesearchconsole_resource.go | 29 +- ...source_googlesearchconsole_resource_sdk.go | 86 +- .../source_googlesheets_data_source.go | 4 - .../provider/source_googlesheets_resource.go | 4 - .../source_googlesheets_resource_sdk.go | 14 - .../provider/source_insightly_resource_sdk.go | 28 +- .../provider/source_intercom_data_source.go | 8 + internal/provider/source_intercom_resource.go | 8 + 
.../provider/source_intercom_resource_sdk.go | 38 +- .../provider/source_lemlist_data_source.go | 2 +- internal/provider/source_lemlist_resource.go | 2 +- .../source_linkedinads_data_source.go | 30 +- .../provider/source_linkedinads_resource.go | 30 +- .../source_linkedinads_resource_sdk.go | 4 +- internal/provider/source_mssql_data_source.go | 18 +- internal/provider/source_mssql_resource.go | 18 +- .../provider/source_mssql_resource_sdk.go | 116 +- .../source_openweather_data_source.go | 231 - .../provider/source_openweather_resource.go | 397 -- .../source_openweather_resource_sdk.go | 104 - .../provider/source_postgres_data_source.go | 52 +- internal/provider/source_postgres_resource.go | 52 +- .../provider/source_postgres_resource_sdk.go | 168 +- .../provider/source_posthog_data_source.go | 4 + internal/provider/source_posthog_resource.go | 4 + .../provider/source_posthog_resource_sdk.go | 28 +- .../provider/source_publicapis_data_source.go | 149 - .../source_publicapis_data_source_sdk.go | 14 - .../provider/source_publicapis_resource.go | 315 -- .../source_publicapis_resource_sdk.go | 63 - internal/provider/source_s3_data_source.go | 467 +- internal/provider/source_s3_resource.go | 475 +- internal/provider/source_s3_resource_sdk.go | 834 +++- internal/provider/source_shopify_resource.go | 2 +- .../provider/source_shopify_resource_sdk.go | 78 +- .../provider/source_stripe_data_source.go | 2 +- internal/provider/source_stripe_resource.go | 4 +- .../provider/source_stripe_resource_sdk.go | 14 +- .../source_zendesksunshine_data_source.go | 43 +- .../source_zendesksunshine_resource.go | 33 +- .../source_zendesksunshine_resource_sdk.go | 54 +- .../source_zendesksupport_resource.go | 2 +- .../source_zendesksupport_resource_sdk.go | 14 +- .../provider/type_destination_bigquery.go | 1 - internal/provider/type_destination_milvus.go | 12 + .../type_destination_milvus_embedding.go | 14 + ...ype_destination_milvus_embedding_cohere.go | 10 + ...destination_milvus_embedding_from_field.go | 11 + .../type_destination_milvus_indexing.go | 14 + ...tination_milvus_indexing_authentication.go | 12 + ...ilvus_indexing_authentication_api_token.go | 10 + ...milvus_indexing_authentication_no_auth.go} | 4 +- ...dexing_authentication_username_password.go | 11 + ...tination_milvus_processing_config_model.go | 12 + .../provider/type_destination_pinecone.go | 12 + .../type_destination_pinecone_embedding.go | 12 + .../type_destination_pinecone_indexing.go | 11 + .../provider/type_destination_snowflake.go | 1 - .../provider/type_destination_typesense.go | 2 +- internal/provider/type_source_amazon_ads.go | 1 + .../provider/type_source_apify_dataset.go | 1 + internal/provider/type_source_auth0.go | 1 + internal/provider/type_source_datadog.go | 17 - .../provider/type_source_datadog_queries.go | 11 - .../type_source_google_search_console.go | 15 +- ...gle_search_console_custom_report_config.go | 10 + .../provider/type_source_google_sheets.go | 1 - internal/provider/type_source_intercom.go | 8 +- internal/provider/type_source_mssql.go | 22 +- .../type_source_mssql_replication_method.go | 10 - .../type_source_mssql_update_method.go | 10 + ..._changes_using_change_data_capture_cdc.go} | 2 +- ..._scan_changes_with_user_defined_cursor.go} | 2 +- .../type_source_mysql_update_method.go | 4 +- internal/provider/type_source_openweather.go | 14 - internal/provider/type_source_postgres.go | 22 +- internal/provider/type_source_postgres1.go | 22 +- ...type_source_postgres_replication_method.go | 12 - 
...ype_source_postgres_replication_method1.go | 12 - .../type_source_postgres_update_method.go | 12 + .../type_source_postgres_update_method1.go | 12 + ..._detect_changes_with_xmin_system_column.go | 9 + ...read_changes_using_write_ahead_log_cdc.go} | 2 +- ...ead_changes_using_write_ahead_log_cdc1.go} | 2 +- ..._scan_changes_with_user_defined_cursor.go} | 2 +- ...read_changes_using_write_ahead_log_cdc.go} | 2 +- ...ead_changes_using_write_ahead_log_cdc1.go} | 2 +- internal/provider/type_source_posthog.go | 9 +- internal/provider/type_source_s3.go | 18 +- ...type_source_s3_file_based_stream_config.go | 18 + ...urce_s3_file_based_stream_config_format.go | 14 + ..._based_stream_config_format_avro_format.go | 10 + ...e_based_stream_config_format_csv_format.go | 22 + ...format_csv_format_csv_header_definition.go | 9 + ...mat_csv_header_definition_autogenerated.go | 9 + ...v_format_csv_header_definition_from_csv.go | 9 + ...mat_csv_header_definition_user_provided.go | 10 + ...based_stream_config_format_jsonl_format.go | 9 + ...sed_stream_config_format_parquet_format.go | 10 + ...e_based_stream_config_format_csv_format.go | 22 + ...format_csv_format_csv_header_definition.go | 9 + .../provider/type_source_zendesk_sunshine1.go | 12 - ...e_zendesk_sunshine_authorization_method.go | 8 +- ..._zendesk_sunshine_authorization_method1.go | 10 - ...sunshine_authorization_method_api_token.go | 7 +- ...unshine_authorization_method_api_token1.go | 12 - ..._sunshine_authorization_method_o_auth20.go | 9 +- ...sunshine_authorization_method_o_auth201.go | 13 - ...e_update_authorization_method_api_token.go | 12 - ..._update_authorization_method_api_token1.go | 12 - ...ne_update_authorization_method_o_auth20.go | 13 - ...e_update_authorization_method_o_auth201.go | 13 - internal/sdk/destinations.go | 472 ++ ...nweather.go => createdestinationmilvus.go} | 8 +- ...icapis.go => createdestinationpinecone.go} | 8 +- .../models/operations/createsourcedatadog.go | 16 - ...nweather.go => deletedestinationmilvus.go} | 6 +- ...icapis.go => deletedestinationpinecone.go} | 6 +- .../models/operations/deletesourcedatadog.go | 17 - .../models/operations/getdestinationmilvus.go | 20 + .../operations/getdestinationpinecone.go | 20 + .../pkg/models/operations/getsourcedatadog.go | 20 - .../models/operations/getsourceopenweather.go | 20 - .../models/operations/getsourcepublicapis.go | 20 - .../models/operations/putdestinationmilvus.go | 19 + .../operations/putdestinationpinecone.go | 19 + .../pkg/models/operations/putsourcedatadog.go | 19 - .../models/operations/putsourceopenweather.go | 19 - .../models/operations/putsourcepublicapis.go | 19 - ...onresponseroottypeforconnectionresponse.go | 24 - .../pkg/models/shared/connectionsresponse.go | 1 - .../shared/destinationazureblobstorage.go | 24 +- .../destinationazureblobstorageupdate.go | 24 +- .../pkg/models/shared/destinationbigquery.go | 4 +- .../shared/destinationbigqueryupdate.go | 4 +- .../models/shared/destinationdatabricks.go | 24 +- .../shared/destinationdatabricksupdate.go | 24 +- .../sdk/pkg/models/shared/destinationgcs.go | 74 +- .../pkg/models/shared/destinationgcsupdate.go | 74 +- .../pkg/models/shared/destinationlangchain.go | 50 +- .../shared/destinationlangchainupdate.go | 50 +- .../pkg/models/shared/destinationmilvus.go | 493 ++ .../shared/destinationmilvuscreaterequest.go | 9 + ...uest.go => destinationmilvusputrequest.go} | 4 +- .../models/shared/destinationmilvusupdate.go | 468 ++ .../pkg/models/shared/destinationmongodb.go | 26 +- 
.../models/shared/destinationmongodbupdate.go | 26 +- .../pkg/models/shared/destinationpinecone.go | 244 + ...go => destinationpineconecreaterequest.go} | 4 +- .../shared/destinationpineconeputrequest.go | 9 + .../shared/destinationpineconeupdate.go | 219 + .../sdk/pkg/models/shared/destinations3.go | 50 +- .../pkg/models/shared/destinations3update.go | 50 +- .../pkg/models/shared/destinationsnowflake.go | 28 +- .../shared/destinationsnowflakeupdate.go | 28 +- .../pkg/models/shared/destinationsresponse.go | 1 - .../pkg/models/shared/destinationtypesense.go | 2 +- .../shared/destinationtypesenseupdate.go | 2 +- .../sdk/pkg/models/shared/jobsresponse.go | 1 - .../sdk/pkg/models/shared/jobstatusenum.go | 1 - .../sdk/pkg/models/shared/sourceairtable.go | 24 +- .../pkg/models/shared/sourceairtableupdate.go | 24 +- .../sdk/pkg/models/shared/sourcealloydb.go | 24 +- .../pkg/models/shared/sourcealloydbupdate.go | 24 +- .../sdk/pkg/models/shared/sourceamazonads.go | 4 +- .../models/shared/sourceamazonadsupdate.go | 4 +- .../pkg/models/shared/sourceapifydataset.go | 4 +- .../models/shared/sourceapifydatasetupdate.go | 4 +- internal/sdk/pkg/models/shared/sourceasana.go | 24 +- .../pkg/models/shared/sourceasanaupdate.go | 24 +- internal/sdk/pkg/models/shared/sourceauth0.go | 26 +- .../pkg/models/shared/sourceauth0update.go | 26 +- .../sdk/pkg/models/shared/sourcedatadog.go | 132 - .../shared/sourcedatadogcreaterequest.go | 11 - .../pkg/models/shared/sourcedatadogupdate.go | 107 - .../pkg/models/shared/sourcee2etestcloud.go | 24 +- .../models/shared/sourcee2etestcloudupdate.go | 24 +- .../sdk/pkg/models/shared/sourcegithub.go | 24 +- .../pkg/models/shared/sourcegithubupdate.go | 24 +- .../sdk/pkg/models/shared/sourcegitlab.go | 24 +- .../pkg/models/shared/sourcegitlabupdate.go | 24 +- .../sdk/pkg/models/shared/sourcegoogleads.go | 6 +- .../models/shared/sourcegoogleadsupdate.go | 6 +- .../shared/sourcegoogleanalyticsdataapi.go | 24 +- .../sourcegoogleanalyticsdataapiupdate.go | 24 +- .../models/shared/sourcegoogleanalyticsv4.go | 24 +- .../shared/sourcegoogleanalyticsv4update.go | 24 +- .../models/shared/sourcegoogledirectory.go | 24 +- .../shared/sourcegoogledirectoryupdate.go | 24 +- .../shared/sourcegooglesearchconsole.go | 98 +- .../shared/sourcegooglesearchconsoleupdate.go | 98 +- .../pkg/models/shared/sourcegooglesheets.go | 30 +- .../models/shared/sourcegooglesheetsupdate.go | 26 +- .../sdk/pkg/models/shared/sourceharvest.go | 24 +- .../pkg/models/shared/sourceharvestupdate.go | 24 +- .../sdk/pkg/models/shared/sourcehubspot.go | 24 +- .../pkg/models/shared/sourcehubspotupdate.go | 24 +- .../sdk/pkg/models/shared/sourceinsightly.go | 4 +- .../models/shared/sourceinsightlyupdate.go | 4 +- .../sdk/pkg/models/shared/sourceintercom.go | 8 +- .../pkg/models/shared/sourceintercomupdate.go | 4 + .../sdk/pkg/models/shared/sourcelemlist.go | 2 +- .../pkg/models/shared/sourcelemlistupdate.go | 2 +- .../pkg/models/shared/sourceleverhiring.go | 24 +- .../models/shared/sourceleverhiringupdate.go | 24 +- .../pkg/models/shared/sourcelinkedinads.go | 110 +- .../models/shared/sourcelinkedinadsupdate.go | 110 +- .../pkg/models/shared/sourcelinkedinpages.go | 24 +- .../shared/sourcelinkedinpagesupdate.go | 24 +- .../sdk/pkg/models/shared/sourcemailchimp.go | 24 +- .../models/shared/sourcemailchimpupdate.go | 24 +- .../pkg/models/shared/sourcemicrosoftteams.go | 24 +- .../shared/sourcemicrosoftteamsupdate.go | 24 +- .../sdk/pkg/models/shared/sourcemixpanel.go | 24 +- .../pkg/models/shared/sourcemixpanelupdate.go | 
24 +- .../sdk/pkg/models/shared/sourcemonday.go | 24 +- .../pkg/models/shared/sourcemondayupdate.go | 24 +- .../sdk/pkg/models/shared/sourcemongodb.go | 26 +- .../pkg/models/shared/sourcemongodbupdate.go | 26 +- internal/sdk/pkg/models/shared/sourcemssql.go | 164 +- .../pkg/models/shared/sourcemssqlupdate.go | 162 +- internal/sdk/pkg/models/shared/sourcemysql.go | 24 +- .../pkg/models/shared/sourcemysqlupdate.go | 24 +- .../sdk/pkg/models/shared/sourcenotion.go | 24 +- .../pkg/models/shared/sourcenotionupdate.go | 24 +- internal/sdk/pkg/models/shared/sourceokta.go | 24 +- .../sdk/pkg/models/shared/sourceoktaupdate.go | 24 +- .../pkg/models/shared/sourceopenweather.go | 246 - .../shared/sourceopenweathercreaterequest.go | 11 - .../models/shared/sourceopenweatherupdate.go | 221 - .../sdk/pkg/models/shared/sourcepinterest.go | 24 +- .../models/shared/sourcepinterestupdate.go | 24 +- .../sdk/pkg/models/shared/sourcepostgres.go | 228 +- .../pkg/models/shared/sourcepostgresupdate.go | 226 +- .../sdk/pkg/models/shared/sourceposthog.go | 6 +- .../pkg/models/shared/sourceposthogupdate.go | 2 + .../sdk/pkg/models/shared/sourcepublicapis.go | 36 - .../shared/sourcepublicapiscreaterequest.go | 11 - .../shared/sourcepublicapisputrequest.go | 9 - .../models/shared/sourcepublicapisupdate.go | 6 - .../sdk/pkg/models/shared/sourceretently.go | 24 +- .../pkg/models/shared/sourceretentlyupdate.go | 24 +- internal/sdk/pkg/models/shared/sources3.go | 595 ++- .../models/shared/sources3createrequest.go | 2 + .../pkg/models/shared/sources3putrequest.go | 2 + .../sdk/pkg/models/shared/sources3update.go | 595 ++- .../sdk/pkg/models/shared/sourcesalesloft.go | 24 +- .../models/shared/sourcesalesloftupdate.go | 24 +- .../sdk/pkg/models/shared/sourceshopify.go | 74 +- .../pkg/models/shared/sourceshopifyupdate.go | 74 +- internal/sdk/pkg/models/shared/sourceslack.go | 24 +- .../pkg/models/shared/sourceslackupdate.go | 24 +- .../pkg/models/shared/sourcesmartsheets.go | 24 +- .../models/shared/sourcesmartsheetsupdate.go | 24 +- .../sdk/pkg/models/shared/sourcesnowflake.go | 24 +- .../models/shared/sourcesnowflakeupdate.go | 24 +- .../sdk/pkg/models/shared/sourcesquare.go | 24 +- .../pkg/models/shared/sourcesquareupdate.go | 24 +- .../sdk/pkg/models/shared/sourcesresponse.go | 1 - .../sdk/pkg/models/shared/sourcestripe.go | 4 +- .../pkg/models/shared/sourcestripeupdate.go | 4 +- .../models/shared/sourcetiktokmarketing.go | 24 +- .../shared/sourcetiktokmarketingupdate.go | 24 +- .../sdk/pkg/models/shared/sourcetrustpilot.go | 24 +- .../models/shared/sourcetrustpilotupdate.go | 24 +- .../sdk/pkg/models/shared/sourcetypeform.go | 24 +- .../pkg/models/shared/sourcetypeformupdate.go | 24 +- .../pkg/models/shared/sourcezendeskchat.go | 24 +- .../models/shared/sourcezendeskchatupdate.go | 24 +- .../models/shared/sourcezendesksunshine.go | 124 +- .../shared/sourcezendesksunshineupdate.go | 124 +- .../pkg/models/shared/sourcezendesksupport.go | 26 +- .../shared/sourcezendesksupportupdate.go | 26 +- ...aceresponseroottypeforworkspaceresponse.go | 10 - .../pkg/models/shared/workspacesresponse.go | 1 - internal/sdk/pkg/utils/form.go | 15 +- internal/sdk/pkg/utils/headers.go | 14 +- internal/sdk/pkg/utils/pathparams.go | 14 +- internal/sdk/pkg/utils/queryparams.go | 24 +- internal/sdk/pkg/utils/requestbody.go | 28 +- internal/sdk/pkg/utils/security.go | 40 +- internal/sdk/pkg/utils/utils.go | 12 + internal/sdk/sdk.go | 4 +- internal/sdk/sources.go | 1202 +---- 829 files changed, 16416 insertions(+), 10674 deletions(-) create mode 
100755 .gitattributes create mode 100644 docs/data-sources/destination_milvus.md create mode 100644 docs/data-sources/destination_pinecone.md delete mode 100644 docs/data-sources/source_datadog.md delete mode 100644 docs/data-sources/source_openweather.md delete mode 100644 docs/data-sources/source_public_apis.md create mode 100644 docs/resources/destination_milvus.md create mode 100644 docs/resources/destination_pinecone.md delete mode 100644 docs/resources/source_datadog.md delete mode 100644 docs/resources/source_openweather.md delete mode 100644 docs/resources/source_public_apis.md create mode 100755 examples/data-sources/airbyte_destination_milvus/data-source.tf create mode 100755 examples/data-sources/airbyte_destination_pinecone/data-source.tf delete mode 100755 examples/data-sources/airbyte_source_datadog/data-source.tf delete mode 100755 examples/data-sources/airbyte_source_openweather/data-source.tf delete mode 100755 examples/data-sources/airbyte_source_public_apis/data-source.tf create mode 100755 examples/resources/airbyte_destination_milvus/resource.tf create mode 100755 examples/resources/airbyte_destination_pinecone/resource.tf delete mode 100755 examples/resources/airbyte_source_datadog/resource.tf delete mode 100755 examples/resources/airbyte_source_openweather/resource.tf delete mode 100755 examples/resources/airbyte_source_public_apis/resource.tf create mode 100755 internal/provider/destination_milvus_data_source.go rename internal/provider/{source_datadog_data_source_sdk.go => destination_milvus_data_source_sdk.go} (65%) create mode 100755 internal/provider/destination_milvus_resource.go create mode 100755 internal/provider/destination_milvus_resource_sdk.go create mode 100755 internal/provider/destination_pinecone_data_source.go rename internal/provider/{source_openweather_data_source_sdk.go => destination_pinecone_data_source_sdk.go} (64%) create mode 100755 internal/provider/destination_pinecone_resource.go create mode 100755 internal/provider/destination_pinecone_resource_sdk.go delete mode 100755 internal/provider/source_datadog_data_source.go delete mode 100755 internal/provider/source_datadog_resource.go delete mode 100755 internal/provider/source_datadog_resource_sdk.go delete mode 100755 internal/provider/source_openweather_data_source.go delete mode 100755 internal/provider/source_openweather_resource.go delete mode 100755 internal/provider/source_openweather_resource_sdk.go delete mode 100755 internal/provider/source_publicapis_data_source.go delete mode 100755 internal/provider/source_publicapis_data_source_sdk.go delete mode 100755 internal/provider/source_publicapis_resource.go delete mode 100755 internal/provider/source_publicapis_resource_sdk.go create mode 100755 internal/provider/type_destination_milvus.go create mode 100755 internal/provider/type_destination_milvus_embedding.go create mode 100755 internal/provider/type_destination_milvus_embedding_cohere.go create mode 100755 internal/provider/type_destination_milvus_embedding_from_field.go create mode 100755 internal/provider/type_destination_milvus_indexing.go create mode 100755 internal/provider/type_destination_milvus_indexing_authentication.go create mode 100755 internal/provider/type_destination_milvus_indexing_authentication_api_token.go rename internal/provider/{type_source_public_apis.go => type_destination_milvus_indexing_authentication_no_auth.go} (62%) create mode 100755 internal/provider/type_destination_milvus_indexing_authentication_username_password.go create mode 100755 
internal/provider/type_destination_milvus_processing_config_model.go create mode 100755 internal/provider/type_destination_pinecone.go create mode 100755 internal/provider/type_destination_pinecone_embedding.go create mode 100755 internal/provider/type_destination_pinecone_indexing.go delete mode 100755 internal/provider/type_source_datadog.go delete mode 100755 internal/provider/type_source_datadog_queries.go create mode 100755 internal/provider/type_source_google_search_console_custom_report_config.go delete mode 100755 internal/provider/type_source_mssql_replication_method.go create mode 100755 internal/provider/type_source_mssql_update_method.go rename internal/provider/{type_source_mssql_replication_method_logical_replication_cdc.go => type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go} (84%) rename internal/provider/{type_source_mysql_update_method_scan_changes_with_user_defined_cursor.go => type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go} (75%) delete mode 100755 internal/provider/type_source_openweather.go delete mode 100755 internal/provider/type_source_postgres_replication_method.go delete mode 100755 internal/provider/type_source_postgres_replication_method1.go create mode 100755 internal/provider/type_source_postgres_update_method.go create mode 100755 internal/provider/type_source_postgres_update_method1.go create mode 100755 internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go rename internal/provider/{type_source_postgres_update_replication_method_logical_replication_cdc.go => type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go} (91%) rename internal/provider/{type_source_postgres_update_replication_method_logical_replication_cdc1.go => type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go} (91%) rename internal/provider/{type_source_mssql_replication_method_standard.go => type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go} (72%) rename internal/provider/{type_source_postgres_replication_method_logical_replication_cdc1.go => type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go} (89%) rename internal/provider/{type_source_postgres_replication_method_logical_replication_cdc.go => type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go} (88%) create mode 100755 internal/provider/type_source_s3_file_based_stream_config.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go create mode 100755 internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go create mode 100755 
internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go create mode 100755 internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go delete mode 100755 internal/provider/type_source_zendesk_sunshine1.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_authorization_method1.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_authorization_method_api_token1.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth201.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token1.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth20.go delete mode 100755 internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth201.go rename internal/sdk/pkg/models/operations/{createsourceopenweather.go => createdestinationmilvus.go} (56%) rename internal/sdk/pkg/models/operations/{createsourcepublicapis.go => createdestinationpinecone.go} (56%) delete mode 100755 internal/sdk/pkg/models/operations/createsourcedatadog.go rename internal/sdk/pkg/models/operations/{deletesourceopenweather.go => deletedestinationmilvus.go} (51%) rename internal/sdk/pkg/models/operations/{deletesourcepublicapis.go => deletedestinationpinecone.go} (51%) delete mode 100755 internal/sdk/pkg/models/operations/deletesourcedatadog.go create mode 100755 internal/sdk/pkg/models/operations/getdestinationmilvus.go create mode 100755 internal/sdk/pkg/models/operations/getdestinationpinecone.go delete mode 100755 internal/sdk/pkg/models/operations/getsourcedatadog.go delete mode 100755 internal/sdk/pkg/models/operations/getsourceopenweather.go delete mode 100755 internal/sdk/pkg/models/operations/getsourcepublicapis.go create mode 100755 internal/sdk/pkg/models/operations/putdestinationmilvus.go create mode 100755 internal/sdk/pkg/models/operations/putdestinationpinecone.go delete mode 100755 internal/sdk/pkg/models/operations/putsourcedatadog.go delete mode 100755 internal/sdk/pkg/models/operations/putsourceopenweather.go delete mode 100755 internal/sdk/pkg/models/operations/putsourcepublicapis.go delete mode 100755 internal/sdk/pkg/models/shared/connectionresponseroottypeforconnectionresponse.go create mode 100755 internal/sdk/pkg/models/shared/destinationmilvus.go create mode 100755 internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go rename internal/sdk/pkg/models/shared/{sourceopenweatherputrequest.go => destinationmilvusputrequest.go} (66%) create mode 100755 internal/sdk/pkg/models/shared/destinationmilvusupdate.go create mode 100755 internal/sdk/pkg/models/shared/destinationpinecone.go rename internal/sdk/pkg/models/shared/{sourcedatadogputrequest.go => destinationpineconecreaterequest.go} (65%) create mode 100755 internal/sdk/pkg/models/shared/destinationpineconeputrequest.go create mode 100755 internal/sdk/pkg/models/shared/destinationpineconeupdate.go delete mode 100755 internal/sdk/pkg/models/shared/sourcedatadog.go delete mode 100755 internal/sdk/pkg/models/shared/sourcedatadogcreaterequest.go delete mode 100755 internal/sdk/pkg/models/shared/sourcedatadogupdate.go delete mode 100755 internal/sdk/pkg/models/shared/sourceopenweather.go delete mode 100755 internal/sdk/pkg/models/shared/sourceopenweathercreaterequest.go delete mode 100755 
internal/sdk/pkg/models/shared/sourceopenweatherupdate.go delete mode 100755 internal/sdk/pkg/models/shared/sourcepublicapis.go delete mode 100755 internal/sdk/pkg/models/shared/sourcepublicapiscreaterequest.go delete mode 100755 internal/sdk/pkg/models/shared/sourcepublicapisputrequest.go delete mode 100755 internal/sdk/pkg/models/shared/sourcepublicapisupdate.go delete mode 100755 internal/sdk/pkg/models/shared/workspaceresponseroottypeforworkspaceresponse.go diff --git a/.gitattributes b/.gitattributes new file mode 100755 index 000000000..e6a994416 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# This allows generated code to be indexed correctly +*.go linguist-generated=false \ No newline at end of file diff --git a/README.md b/README.md index d9d2d7f59..edacfb928 100755 --- a/README.md +++ b/README.md @@ -24,7 +24,7 @@ terraform { required_providers { airbyte = { source = "airbytehq/airbyte" - version = "0.3.3" + version = "0.3.4" } } } diff --git a/airbyte.yaml b/airbyte.yaml index 069c0a303..ba26f4a2c 100644 --- a/airbyte.yaml +++ b/airbyte.yaml @@ -3946,92 +3946,6 @@ paths: type: "string" in: "path" required: true - /sources#Datadog: - post: - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SourceDatadogCreateRequest" - tags: - - "Sources" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/SourceResponse" - description: "Successful operation" - "400": - description: "Invalid data" - "403": - description: "Not allowed" - operationId: "createSourceDatadog" - summary: "Create a source" - description: - "Creates a source given a name, workspace id, and a json blob containing\ - \ the configuration for the source." - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Datadog#create - /sources/{sourceId}#Datadog: - get: - tags: - - "Sources" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/SourceResponse" - description: "Get a Source by the id in the path." 
- "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "getSourceDatadog" - summary: "Get Source details" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Datadog#read - put: - tags: - - "Sources" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SourceDatadogPutRequest" - responses: - "2XX": - description: "The resource was updated successfully" - "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "putSourceDatadog" - summary: "Update a Source fully" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Datadog#update - delete: - tags: - - "Sources" - responses: - "2XX": - description: "The resource was deleted successfully" - "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "deleteSourceDatadog" - summary: "Delete a Source" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Datadog#delete - parameters: - - name: "sourceId" - schema: - format: "UUID" - type: "string" - in: "path" - required: true /sources#Datascope: post: requestBody: @@ -10654,92 +10568,6 @@ paths: type: "string" in: "path" required: true - /sources#Openweather: - post: - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SourceOpenweatherCreateRequest" - tags: - - "Sources" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/SourceResponse" - description: "Successful operation" - "400": - description: "Invalid data" - "403": - description: "Not allowed" - operationId: "createSourceOpenweather" - summary: "Create a source" - description: - "Creates a source given a name, workspace id, and a json blob containing\ - \ the configuration for the source." - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#create - /sources/{sourceId}#Openweather: - get: - tags: - - "Sources" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/SourceResponse" - description: "Get a Source by the id in the path." 
- "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "getSourceOpenweather" - summary: "Get Source details" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#read - put: - tags: - - "Sources" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SourceOpenweatherPutRequest" - responses: - "2XX": - description: "The resource was updated successfully" - "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "putSourceOpenweather" - summary: "Update a Source fully" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#update - delete: - tags: - - "Sources" - responses: - "2XX": - description: "The resource was deleted successfully" - "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "deleteSourceOpenweather" - summary: "Delete a Source" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_Openweather#delete - parameters: - - name: "sourceId" - schema: - format: "UUID" - type: "string" - in: "path" - required: true /sources#Oracle: post: requestBody: @@ -12374,92 +12202,6 @@ paths: type: "string" in: "path" required: true - /sources#PublicApis: - post: - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SourcePublicApisCreateRequest" - tags: - - "Sources" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/SourceResponse" - description: "Successful operation" - "400": - description: "Invalid data" - "403": - description: "Not allowed" - operationId: "createSourcePublicApis" - summary: "Create a source" - description: - "Creates a source given a name, workspace id, and a json blob containing\ - \ the configuration for the source." - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PublicApis#create - /sources/{sourceId}#PublicApis: - get: - tags: - - "Sources" - responses: - "200": - content: - application/json: - schema: - $ref: "#/components/schemas/SourceResponse" - description: "Get a Source by the id in the path." 
- "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "getSourcePublicApis" - summary: "Get Source details" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PublicApis#read - put: - tags: - - "Sources" - requestBody: - content: - application/json: - schema: - $ref: "#/components/schemas/SourcePublicApisPutRequest" - responses: - "2XX": - description: "The resource was updated successfully" - "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "putSourcePublicApis" - summary: "Update a Source fully" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PublicApis#update - delete: - tags: - - "Sources" - responses: - "2XX": - description: "The resource was deleted successfully" - "403": - description: "Not allowed" - "404": - description: "Not found" - operationId: "deleteSourcePublicApis" - summary: "Delete a Source" - x-use-speakeasy-middleware: true - x-speakeasy-entity-operation: Source_PublicApis#delete - parameters: - - name: "sourceId" - schema: - format: "UUID" - type: "string" - in: "path" - required: true /sources#PunkApi: post: requestBody: @@ -20114,6 +19856,92 @@ paths: type: "string" in: "path" required: true + /destinations#Milvus: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMilvusCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationMilvus" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#create + /destinations/{destinationId}#Milvus: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." 
+ "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationMilvus" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationMilvusPutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationMilvus" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationMilvus" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Milvus#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /destinations#Mongodb: post: requestBody: @@ -20458,6 +20286,92 @@ paths: type: "string" in: "path" required: true + /destinations#Pinecone: + post: + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPineconeCreateRequest" + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Successful operation" + "400": + description: "Invalid data" + "403": + description: "Not allowed" + operationId: "createDestinationPinecone" + summary: "Create a destination" + description: + "Creates a destination given a name, workspace id, and a json blob containing\ + \ the configuration for the destination." + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#create + /destinations/{destinationId}#Pinecone: + get: + tags: + - "Destinations" + responses: + "200": + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationResponse" + description: "Get a Destination by the id in the path." + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "getDestinationPinecone" + summary: "Get Destination details" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#read + put: + tags: + - "Destinations" + requestBody: + content: + application/json: + schema: + $ref: "#/components/schemas/DestinationPineconePutRequest" + responses: + "2XX": + description: "The resource was updated successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "putDestinationPinecone" + summary: "Update a Destination fully" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#update + delete: + tags: + - "Destinations" + responses: + "2XX": + description: "The resource was deleted successfully" + "403": + description: "Not allowed" + "404": + description: "Not found" + operationId: "deleteDestinationPinecone" + summary: "Delete a Destination" + x-use-speakeasy-middleware: true + x-speakeasy-entity-operation: Destination_Pinecone#delete + parameters: + - name: "destinationId" + schema: + format: "UUID" + type: "string" + in: "path" + required: true /destinations#Postgres: post: requestBody: @@ -21697,6 +21611,8 @@ components: \ flow." 
type: "string" x-implements: "io.airbyte.public_api.server.helpers.ConfigurableActor" + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true SourcePutRequest: required: - "name" @@ -21708,6 +21624,8 @@ components: configuration: $ref: "#/components/schemas/SourceConfiguration" x-implements: "io.airbyte.public_api.server.helpers.ConfigurableActor" + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true SourcePatchRequest: type: "object" properties: @@ -21724,6 +21642,8 @@ components: \ flow." type: "string" x-implements: "io.airbyte.public_api.server.helpers.ConfigurableActor" + x-speakeasy-entity: Source + x-speakeasy-param-suppress-computed-diff: true InitiateOauthRequest: title: "Root Type for initiate-oauth-post-body" description: "POST body for initiating OAuth via the public API" @@ -21988,6 +21908,8 @@ components: configuration: $ref: "#/components/schemas/DestinationConfiguration" x-implements: "io.airbyte.public_api.server.helpers.ConfigurableActor" + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true DestinationPatchRequest: type: "object" properties: @@ -21996,6 +21918,8 @@ components: configuration: $ref: "#/components/schemas/DestinationConfiguration" x-implements: "io.airbyte.public_api.server.helpers.ConfigurableActor" + x-speakeasy-entity: Destination + x-speakeasy-param-suppress-computed-diff: true DestinationPutRequest: required: - "name" @@ -22632,24 +22556,6 @@ components: > personal access token." type: "string" airbyte_secret: true - source-public-apis: - title: "Public Apis Spec" - type: "object" - required: - - "sourceType" - properties: - sourceType: - title: "public-apis" - const: "public-apis" - enum: - - "public-apis" - order: 0 - type: "string" - source-public-apis-update: - title: "Public Apis Spec" - type: "object" - required: [] - properties: {} source-yotpo: title: "Yotpo Spec" type: "object" @@ -23338,301 +23244,95 @@ components: required: - "api_key" - "start_date" - source-datadog: - title: "Datadog Source Spec" + source-quickbooks: + title: "Source QuickBooks Spec" type: "object" required: - - "api_key" - - "application_key" + - "credentials" + - "start_date" + - "sandbox" - "sourceType" properties: - api_key: - title: "API Key" - description: "Datadog API key" - type: "string" - airbyte_secret: true - order: 1 - application_key: - title: "Application Key" - description: "Datadog application key" - type: "string" - airbyte_secret: true - order: 2 - query: - title: "Query" - description: "The search query. This just applies to Incremental syncs.\ - \ If empty, it'll collect all logs." - type: "string" - order: 3 - start_date: - title: "Start date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ - \ data before this date will not be replicated. This just applies to Incremental\ - \ syncs." - type: "string" - examples: - - "2022-10-01T00:00:00Z" - order: 4 - site: - title: "Site" - description: "The site where Datadog data resides in." - type: "string" - enum: - - "datadoghq.com" - - "us3.datadoghq.com" - - "us5.datadoghq.com" - - "datadoghq.eu" - - "ddog-gov.com" - default: "datadoghq.com" - order: 5 - end_date: - title: "End date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - description: "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ - \ after this date will not be replicated. 
An empty value will represent\ - \ the current datetime for each execution. This just applies to Incremental\ - \ syncs." - examples: - - "2022-10-01T00:00:00Z" - type: "string" - order: 6 - max_records_per_request: - type: "integer" - title: "Max records per requests" - default: 5000 - minimum: 1 - maximum: 5000 - description: "Maximum number of records to collect per request." - order: 7 - queries: - title: "Queries" - description: "List of queries to be run and used as inputs." - type: "array" - order: 8 - default: [] - items: - type: "object" + credentials: + title: "Authorization Method" + type: "object" + order: 0 + oneOf: + - type: "object" + title: "OAuth2.0" required: - - "name" - - "data_source" - - "query" + - "client_id" + - "client_secret" + - "refresh_token" + - "access_token" + - "token_expiry_date" + - "realm_id" properties: - name: - title: "Query Name" - description: "The variable name for use in queries." - type: "string" - order: 1 - data_source: - title: "Data Source" - description: "A data source that is powered by the platform." + auth_type: type: "string" + const: "oauth2.0" enum: - - "metrics" - - "cloud_cost" - - "logs" - - "rum" - order: 2 - query: - title: "Query" - description: "A classic query string." + - "oauth2.0" + client_id: type: "string" - order: 3 - sourceType: - title: "datadog" - const: "datadog" - enum: - - "datadog" - order: 0 - type: "string" - source-datadog-update: - title: "Datadog Source Spec" - type: "object" - required: - - "api_key" - - "application_key" - properties: - api_key: - title: "API Key" - description: "Datadog API key" - type: "string" - airbyte_secret: true + title: "Client ID" + description: "Identifies which app is making the request. Obtain this\ + \ value from the Keys tab on the app profile via My Apps on the\ + \ developer site. There are two versions of this key: development\ + \ and production." + client_secret: + description: " Obtain this value from the Keys tab on the app profile\ + \ via My Apps on the developer site. There are two versions of this\ + \ key: development and production." + title: "Client Secret" + type: "string" + airbyte_secret: true + refresh_token: + description: "A token used when refreshing the access token." + title: "Refresh Token" + type: "string" + airbyte_secret: true + access_token: + description: "Access token fot making authenticated requests." + title: "Access Token" + type: "string" + airbyte_secret: true + token_expiry_date: + type: "string" + title: "Token Expiry Date" + description: "The date-time when the access token should be refreshed." + format: "date-time" + realm_id: + description: "Labeled Company ID. The Make API Calls panel is populated\ + \ with the realm id and the current access token." + title: "Realm ID" + type: "string" + airbyte_secret: true + start_date: order: 1 - application_key: - title: "Application Key" - description: "Datadog application key" - type: "string" - airbyte_secret: true - order: 2 - query: - title: "Query" - description: "The search query. This just applies to Incremental syncs.\ - \ If empty, it'll collect all logs." + description: "The default value to use if no bookmark exists for an endpoint\ + \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ + \ date will not be replicated." + title: "Start Date" type: "string" - order: 3 - start_date: - title: "Start date" + format: "date-time" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - description: "UTC date and time in the format 2017-01-25T00:00:00Z. 
Any\ - \ data before this date will not be replicated. This just applies to Incremental\ - \ syncs." - type: "string" examples: - - "2022-10-01T00:00:00Z" - order: 4 - site: - title: "Site" - description: "The site where Datadog data resides in." - type: "string" + - "2021-03-20T00:00:00Z" + sandbox: + order: 2 + description: "Determines whether to use the sandbox or production environment." + title: "Sandbox" + type: "boolean" + default: false + sourceType: + title: "quickbooks" + const: "quickbooks" enum: - - "datadoghq.com" - - "us3.datadoghq.com" - - "us5.datadoghq.com" - - "datadoghq.eu" - - "ddog-gov.com" - default: "datadoghq.com" - order: 5 - end_date: - title: "End date" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - description: "UTC date and time in the format 2017-01-25T00:00:00Z. Data\ - \ after this date will not be replicated. An empty value will represent\ - \ the current datetime for each execution. This just applies to Incremental\ - \ syncs." - examples: - - "2022-10-01T00:00:00Z" + - "quickbooks" + order: 0 type: "string" - order: 6 - max_records_per_request: - type: "integer" - title: "Max records per requests" - default: 5000 - minimum: 1 - maximum: 5000 - description: "Maximum number of records to collect per request." - order: 7 - queries: - title: "Queries" - description: "List of queries to be run and used as inputs." - type: "array" - order: 8 - default: [] - items: - type: "object" - required: - - "name" - - "data_source" - - "query" - properties: - name: - title: "Query Name" - description: "The variable name for use in queries." - type: "string" - order: 1 - data_source: - title: "Data Source" - description: "A data source that is powered by the platform." - type: "string" - enum: - - "metrics" - - "cloud_cost" - - "logs" - - "rum" - order: 2 - query: - title: "Query" - description: "A classic query string." - type: "string" - order: 3 - source-quickbooks: - title: "Source QuickBooks Spec" - type: "object" - required: - - "credentials" - - "start_date" - - "sandbox" - - "sourceType" - properties: - credentials: - title: "Authorization Method" - type: "object" - order: 0 - oneOf: - - type: "object" - title: "OAuth2.0" - required: - - "client_id" - - "client_secret" - - "refresh_token" - - "access_token" - - "token_expiry_date" - - "realm_id" - properties: - auth_type: - type: "string" - const: "oauth2.0" - enum: - - "oauth2.0" - client_id: - type: "string" - title: "Client ID" - description: "Identifies which app is making the request. Obtain this\ - \ value from the Keys tab on the app profile via My Apps on the\ - \ developer site. There are two versions of this key: development\ - \ and production." - client_secret: - description: " Obtain this value from the Keys tab on the app profile\ - \ via My Apps on the developer site. There are two versions of this\ - \ key: development and production." - title: "Client Secret" - type: "string" - airbyte_secret: true - refresh_token: - description: "A token used when refreshing the access token." - title: "Refresh Token" - type: "string" - airbyte_secret: true - access_token: - description: "Access token fot making authenticated requests." - title: "Access Token" - type: "string" - airbyte_secret: true - token_expiry_date: - type: "string" - title: "Token Expiry Date" - description: "The date-time when the access token should be refreshed." - format: "date-time" - realm_id: - description: "Labeled Company ID. 
The Make API Calls panel is populated\ - \ with the realm id and the current access token." - title: "Realm ID" - type: "string" - airbyte_secret: true - start_date: - order: 1 - description: "The default value to use if no bookmark exists for an endpoint\ - \ (rfc3339 date string). E.g, 2021-03-20T00:00:00Z. Any data before this\ - \ date will not be replicated." - title: "Start Date" - type: "string" - format: "date-time" - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - examples: - - "2021-03-20T00:00:00Z" - sandbox: - order: 2 - description: "Determines whether to use the sandbox or production environment." - title: "Sandbox" - type: "boolean" - default: false - sourceType: - title: "quickbooks" - const: "quickbooks" - enum: - - "quickbooks" - order: 0 - type: "string" - source-quickbooks-update: + source-quickbooks-update: title: "Source QuickBooks Spec" type: "object" required: @@ -23713,7 +23413,6 @@ components: type: "boolean" default: false source-dockerhub: - title: "Dockerhub Spec" type: "object" required: - "docker_username" @@ -23721,6 +23420,8 @@ components: properties: docker_username: type: "string" + order: 0 + title: "Docker Username" description: "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ \ API call)" pattern: "^[a-z0-9_\\-]+$" @@ -23734,13 +23435,14 @@ components: order: 0 type: "string" source-dockerhub-update: - title: "Dockerhub Spec" type: "object" required: - "docker_username" properties: docker_username: type: "string" + order: 0 + title: "Docker Username" description: "Username of DockerHub person or organization (for https://hub.docker.com/v2/repositories/USERNAME/\ \ API call)" pattern: "^[a-z0-9_\\-]+$" @@ -24553,29 +24255,18 @@ components: default: "require" replication_method: type: "object" - title: "Replication Method" - description: "Replication method for extracting data from the database." + title: "Update Method" + description: "Configures how data is extracted from the database." order: 9 group: "advanced" + default: "CDC" + display_type: "radio" oneOf: - - title: "Standard (Xmin)" - description: "Xmin replication requires no setup on the DB side but will\ - \ not be able to represent deletions incrementally." - required: - - "method" - properties: - method: - type: "string" - const: "Xmin" - order: 0 - enum: - - "Xmin" - - title: "Logical Replication (CDC)" - description: "Logical replication uses the Postgres write-ahead log (WAL)\ - \ to detect inserts, updates, and deletes. This needs to be configured\ - \ on the source database itself. Only available on Postgres 10 and above.\ - \ Read the docs." + - title: "Read Changes using Write-Ahead Log (CDC)" + description: "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." required: - "method" - "replication_slot" @@ -24646,9 +24337,24 @@ components: - "After loading Data in the destination" default: "After loading Data in the destination" order: 7 - - title: "Standard" - description: "Standard replication requires no setup on the DB side but\ - \ will not be able to represent deletions incrementally." + - title: "Detect Changes with Xmin System Column" + description: "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. Only recommended for tables up to 500GB." 
+ required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." required: - "method" properties: @@ -25000,29 +24706,18 @@ components: default: "require" replication_method: type: "object" - title: "Replication Method" - description: "Replication method for extracting data from the database." + title: "Update Method" + description: "Configures how data is extracted from the database." order: 9 group: "advanced" + default: "CDC" + display_type: "radio" oneOf: - - title: "Standard (Xmin)" - description: "Xmin replication requires no setup on the DB side but will\ - \ not be able to represent deletions incrementally." - required: - - "method" - properties: - method: - type: "string" - const: "Xmin" - order: 0 - enum: - - "Xmin" - - title: "Logical Replication (CDC)" - description: "Logical replication uses the Postgres write-ahead log (WAL)\ - \ to detect inserts, updates, and deletes. This needs to be configured\ - \ on the source database itself. Only available on Postgres 10 and above.\ - \ Read the docs." + - title: "Read Changes using Write-Ahead Log (CDC)" + description: "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source\ + \ database itself. Recommended for tables of any size." required: - "method" - "replication_slot" @@ -25093,9 +24788,24 @@ components: - "After loading Data in the destination" default: "After loading Data in the destination" order: 7 - - title: "Standard" - description: "Standard replication requires no setup on the DB side but\ - \ will not be able to represent deletions incrementally." + - title: "Detect Changes with Xmin System Column" + description: "Recommended - Incrementally reads new inserts and\ + \ updates via Postgres Xmin system column. Only recommended for tables up to 500GB." + required: + - "method" + properties: + method: + type: "string" + const: "Xmin" + order: 0 + enum: + - "Xmin" + - title: "Scan Changes with User Defined Cursor" + description: "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." required: - "method" properties: @@ -26043,7 +25753,6 @@ components: type: "object" required: - "credentials" - - "start_date" - "customer_id" - "sourceType" properties: @@ -26118,7 +25827,8 @@ components: type: "string" title: "Start Date" description: "UTC date in the format YYYY-MM-DD. Any data before this date\ - \ will not be replicated." + \ will not be replicated. (Default value of two years ago is used if not\ + \ set)" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" pattern_descriptor: "YYYY-MM-DD" examples: @@ -26129,7 +25839,7 @@ components: type: "string" title: "End Date" description: "UTC date in the format YYYY-MM-DD. Any data after this date\ - \ will not be replicated." + \ will not be replicated. (Default value of today is used if not set)" pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" pattern_descriptor: "YYYY-MM-DD" examples: @@ -26203,7 +25913,6 @@ components: type: "object" required: - "credentials" - - "start_date" - "customer_id" properties: credentials: @@ -26277,7 +25986,8 @@ components: type: "string" title: "Start Date" description: "UTC date in the format YYYY-MM-DD. 
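The reworked Postgres `replication_method` object above now presents three update methods: write-ahead log CDC, the Xmin system column, and a user-defined cursor. A hedged sketch of the two incremental variants follows; only the Xmin constant and the CDC option's `replication_slot` requirement are visible in this excerpt, so the CDC method constant, the slot name, and any further CDC fields are assumptions.

```python
# Hedged examples of the "Update Method" (replication_method) portion of a
# Postgres source configuration under this schema revision.

# Fully shown above: the Xmin option only needs its method constant.
xmin_update_method = {"method": "Xmin"}

# Partially shown: the CDC option additionally requires at least a
# replication_slot; the "CDC" constant is assumed from the object-level
# default, and further required fields may exist outside this excerpt.
cdc_update_method = {
    "method": "CDC",
    "replication_slot": "airbyte_slot",  # placeholder slot name
}
```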
Any data before this date\ - \ will not be replicated." + \ will not be replicated. (Default value of two years ago is used if not\ + \ set)" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" pattern_descriptor: "YYYY-MM-DD" examples: @@ -26288,7 +25998,7 @@ components: type: "string" title: "End Date" description: "UTC date in the format YYYY-MM-DD. Any data after this date\ - \ will not be replicated." + \ will not be replicated. (Default value of today is used if not set)" pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" pattern_descriptor: "YYYY-MM-DD" examples: @@ -26355,7 +26065,6 @@ components: type: "object" required: - "site_urls" - - "start_date" - "authorization" - "sourceType" properties: @@ -26365,7 +26074,7 @@ components: type: "string" title: "Website URL Property" description: "The URLs of the website property attached to your GSC account.\ - \ Read more here." examples: - "https://example1.com/" @@ -26374,21 +26083,25 @@ components: start_date: type: "string" title: "Start Date" - description: "UTC date in the format 2017-01-25. Any data before this date\ + description: "UTC date in the format YYYY-MM-DD. Any data before this date\ \ will not be replicated." - examples: - - "2021-01-01" + default: "2021-01-01" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true order: 1 format: "date" end_date: type: "string" title: "End Date" - description: "UTC date in the format 2017-01-25. Any data after this date\ - \ will not be replicated. Must be greater or equal to the start date field." + description: "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." examples: - "2021-12-12" pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" order: 2 format: "date" authorization: @@ -26471,24 +26184,58 @@ components: order: 4 type: "string" title: "Custom Reports" - description: "A JSON array describing the custom reports you want to sync\ - \ from Google Search Console. See the docs for more information about the exact format you can use\ - \ to fill out this field." + airbyte_hidden: true + description: "(DEPRCATED) A JSON array describing the custom reports you\ + \ want to sync from Google Search Console. See our documentation\ + \ for more information on formulating custom reports." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: "A list of dimensions (country, date, device, page, query)" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + minItems: 1 + required: + - "name" + - "dimensions" data_state: type: "string" - title: "Data State" + title: "Data Freshness" enum: - "final" - "all" - description: "If \"final\" or if this parameter is omitted, the returned\ - \ data will include only finalized data. Setting this parameter to \"\ - all\" should not be used with Incremental Sync mode as it may cause data\ - \ loss. If \"all\", data will include fresh data." 
+ description: "If set to 'final', the returned data will include only finalized,\ + \ stable data. If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." examples: - "final" + - "all" default: "final" - order: 5 + order: 6 sourceType: title: "google-search-console" const: "google-search-console" @@ -26501,7 +26248,6 @@ components: type: "object" required: - "site_urls" - - "start_date" - "authorization" properties: site_urls: @@ -26510,7 +26256,7 @@ components: type: "string" title: "Website URL Property" description: "The URLs of the website property attached to your GSC account.\ - \ Read more here." examples: - "https://example1.com/" @@ -26519,21 +26265,25 @@ components: start_date: type: "string" title: "Start Date" - description: "UTC date in the format 2017-01-25. Any data before this date\ + description: "UTC date in the format YYYY-MM-DD. Any data before this date\ \ will not be replicated." - examples: - - "2021-01-01" + default: "2021-01-01" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" + always_show: true order: 1 format: "date" end_date: type: "string" title: "End Date" - description: "UTC date in the format 2017-01-25. Any data after this date\ - \ will not be replicated. Must be greater or equal to the start date field." + description: "UTC date in the format YYYY-MM-DD. Any data created after\ + \ this date will not be replicated. Must be greater or equal to the start\ + \ date field. Leaving this field blank will replicate all data from the\ + \ start date onward." examples: - "2021-12-12" pattern: "^$|^[0-9]{4}-[0-9]{2}-[0-9]{2}$" + pattern_descriptor: "YYYY-MM-DD" order: 2 format: "date" authorization: @@ -26616,24 +26366,58 @@ components: order: 4 type: "string" title: "Custom Reports" - description: "A JSON array describing the custom reports you want to sync\ - \ from Google Search Console. See the docs for more information about the exact format you can use\ - \ to fill out this field." + airbyte_hidden: true + description: "(DEPRCATED) A JSON array describing the custom reports you\ + \ want to sync from Google Search Console. See our documentation\ + \ for more information on formulating custom reports." + custom_reports_array: + title: "Custom Reports" + description: "You can add your Custom Analytics report by creating one." + order: 5 + type: "array" + items: + title: "Custom Report Config" + type: "object" + properties: + name: + title: "Name" + description: "The name of the custom report, this name would be used\ + \ as stream name" + type: "string" + dimensions: + title: "Dimensions" + description: "A list of dimensions (country, date, device, page, query)" + type: "array" + items: + title: "ValidEnums" + description: "An enumeration of dimensions." + enum: + - "country" + - "date" + - "device" + - "page" + - "query" + minItems: 1 + required: + - "name" + - "dimensions" data_state: type: "string" - title: "Data State" + title: "Data Freshness" enum: - "final" - "all" - description: "If \"final\" or if this parameter is omitted, the returned\ - \ data will include only finalized data. Setting this parameter to \"\ - all\" should not be used with Incremental Sync mode as it may cause data\ - \ loss. If \"all\", data will include fresh data." + description: "If set to 'final', the returned data will include only finalized,\ + \ stable data. 
If set to 'all', fresh data will be included. When using\ + \ Incremental sync mode, we do not recommend setting this parameter to\ + \ 'all' as it may cause data loss. More information can be found in our\ + \ full\ + \ documentation." examples: - "final" + - "all" default: "final" - order: 5 + order: 6 source-kyve: title: "KYVE Spec" type: "object" @@ -27123,7 +26907,6 @@ components: type: "object" required: - "shop" - - "start_date" - "sourceType" properties: shop: @@ -27132,7 +26915,7 @@ components: description: "The name of your Shopify store found in the URL. For example,\ \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ \ or 'NAME.myshopify.com'." - pattern: "^(?!https://)(?!http://).*" + pattern: "^(?!https://)(?!https://).*" examples: - "my-store" - "my-store.myshopify.com" @@ -27143,26 +26926,6 @@ components: type: "object" order: 2 oneOf: - - title: "API Password" - description: "API Password Auth" - type: "object" - required: - - "auth_method" - - "api_password" - properties: - auth_method: - type: "string" - const: "api_password" - order: 0 - enum: - - "api_password" - api_password: - type: "string" - title: "API Password" - description: "The API Password for your private application in the\ - \ `Shopify` store." - airbyte_secret: true - order: 1 - type: "object" title: "OAuth2.0" description: "OAuth2.0" @@ -27193,13 +26956,32 @@ components: description: "The Access Token for making authenticated requests." airbyte_secret: true order: 3 + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 start_date: type: "string" title: "Replication Start Date" description: "The date you would like to replicate data from. Format: YYYY-MM-DD.\ \ Any data before this date will not be replicated." - examples: - - "2021-01-01" + default: "2020-01-01" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" format: "date" order: 3 @@ -27215,7 +26997,6 @@ components: type: "object" required: - "shop" - - "start_date" properties: shop: type: "string" @@ -27223,7 +27004,7 @@ components: description: "The name of your Shopify store found in the URL. For example,\ \ if your URL was https://NAME.myshopify.com, then the name would be 'NAME'\ \ or 'NAME.myshopify.com'." - pattern: "^(?!https://)(?!http://).*" + pattern: "^(?!https://)(?!https://).*" examples: - "my-store" - "my-store.myshopify.com" @@ -27234,26 +27015,6 @@ components: type: "object" order: 2 oneOf: - - title: "API Password" - description: "API Password Auth" - type: "object" - required: - - "auth_method" - - "api_password" - properties: - auth_method: - type: "string" - const: "api_password" - order: 0 - enum: - - "api_password" - api_password: - type: "string" - title: "API Password" - description: "The API Password for your private application in the\ - \ `Shopify` store." - airbyte_secret: true - order: 1 - type: "object" title: "OAuth2.0" description: "OAuth2.0" @@ -27284,13 +27045,32 @@ components: description: "The Access Token for making authenticated requests." 
airbyte_secret: true order: 3 + - title: "API Password" + description: "API Password Auth" + type: "object" + required: + - "auth_method" + - "api_password" + properties: + auth_method: + type: "string" + const: "api_password" + order: 0 + enum: + - "api_password" + api_password: + type: "string" + title: "API Password" + description: "The API Password for your private application in the\ + \ `Shopify` store." + airbyte_secret: true + order: 1 start_date: type: "string" title: "Replication Start Date" description: "The date you would like to replicate data from. Format: YYYY-MM-DD.\ \ Any data before this date will not be replicated." - examples: - - "2021-01-01" + default: "2020-01-01" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" format: "date" order: 3 @@ -27798,34 +27578,369 @@ components: - "start_date" - "access_token" source-s3: - title: "S3 Source Spec" + title: "Config" + description: "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." type: "object" properties: + start_date: + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + file_type: + title: "File Type" + description: "The data file type that is being extracted for a stream." + type: "string" + globs: + title: "Globs" + description: "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + type: "array" + items: + type: "string" + legacy_prefix: + title: "Legacy Prefix" + description: "The path prefix configured in v3 versions of the S3\ + \ connector. This option is deprecated in favor of a single glob." + airbyte_hidden: true + type: "string" + validation_policy: + title: "Validation Policy" + description: "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + primary_key: + title: "Primary Key" + description: "The column or columns (for a composite key) that serves\ + \ as the unique identifier of a record." 
+ type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." + type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: "\"" + type: "string" + escape_char: + title: "Escape Character" + description: "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. 
Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." + default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + type: "object" + true_values: + title: "True Values" + description: "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + inference_type: + title: "Inference Type" + description: "How to infer the types of the columns. If none,\ + \ inference default to strings." + default: "None" + airbyte_hidden: true + enum: + - "None" + - "Primitive Types Only" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + schemaless: + title: "Schemaless" + description: "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "file_type" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 2 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." 
+ default: "" + order: 4 + type: "string" dataset: title: "Output Stream Name" - description: "The name of the stream you would like this source to output.\ - \ Can contain letters, numbers, or underscores." + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use streams.name instead. The name of the stream you\ + \ would like this source to output. Can contain letters, numbers, or underscores." pattern: "^([A-Za-z0-9-_]+)$" - order: 0 + order: 100 type: "string" + airbyte_hidden: true path_pattern: title: "Pattern of files to replicate" - description: "A regular expression which tells the connector which files\ - \ to replicate. All files which match this pattern will be replicated.\ - \ Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR\ - \ and SPLIT flags are enabled). Use pattern ** to pick\ - \ up all files." + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use streams.globs instead. A regular expression which\ + \ tells the connector which files to replicate. All files which match\ + \ this pattern will be replicated. Use | to separate multiple patterns.\ + \ See this page to understand pattern syntax (GLOBSTAR and SPLIT\ + \ flags are enabled). Use pattern ** to pick up all files." examples: - "**" - "myFolder/myTableFiles/*.csv|myFolder/myOtherTableFiles/*.csv" - order: 10 + order: 110 type: "string" + airbyte_hidden: true format: title: "File Format" - description: "The format of the files you'd like to replicate" + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use streams.format instead. The format of the files\ + \ you'd like to replicate" default: "csv" - order: 20 + order: 120 type: "object" oneOf: - title: "CSV" @@ -28025,19 +28140,23 @@ components: default: 0 order: 2 type: "integer" + airbyte_hidden: true schema: title: "Manually enforced data schema" - description: "Optionally provide a schema to enforce, as a valid JSON string.\ - \ Ensure this is a mapping of { \"column\" : \"type\" },\ - \ where types are valid { \"column\" : \"type\" }, where types are valid\ + \ JSON Schema datatypes. Leave as {} to auto-infer\ \ the schema." default: "{}" examples: - "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"\ array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}" - order: 30 + order: 130 type: "string" + airbyte_hidden: true provider: title: "S3: Amazon Web Services" type: "object" @@ -28089,10 +28208,13 @@ components: pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 5 type: "string" - required: - - "bucket" - order: 11 - description: "Use this to load files from S3 or S3-compatible services" + required: [] + order: 111 + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use bucket, aws_access_key_id, aws_secret_access_key\ + \ and endpoint instead. 
Use this to load files from S3 or S3-compatible\ + \ services" + airbyte_hidden: true sourceType: title: "s3" const: "s3" @@ -28101,39 +28223,373 @@ components: order: 0 type: "string" required: - - "dataset" - - "path_pattern" - - "provider" + - "streams" + - "bucket" - "sourceType" source-s3-update: - title: "S3 Source Spec" + title: "Config" + description: "NOTE: When this Spec is changed, legacy_config_transformer.py\ + \ must also be modified to uptake the changes\nbecause it is responsible for\ + \ converting legacy S3 v3 configs into v4 configs using the File-Based CDK." type: "object" properties: + start_date: + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00.000000Z.\ + \ Any file modified before this date will not be replicated." + examples: + - "2021-01-01T00:00:00.000000Z" + format: "date-time" + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{6}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ss.SSSSSSZ" + order: 1 + type: "string" + streams: + title: "The list of streams to sync" + description: "Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their\ + \ format, and how they should be parsed and validated. When sending data\ + \ to warehouse destination such as Snowflake or BigQuery, each stream\ + \ is a separate table." + order: 10 + type: "array" + items: + title: "FileBasedStreamConfig" + type: "object" + properties: + name: + title: "Name" + description: "The name of the stream." + type: "string" + file_type: + title: "File Type" + description: "The data file type that is being extracted for a stream." + type: "string" + globs: + title: "Globs" + description: "The pattern used to specify which files should be selected\ + \ from the file system. For more information on glob pattern matching\ + \ look here." + type: "array" + items: + type: "string" + legacy_prefix: + title: "Legacy Prefix" + description: "The path prefix configured in v3 versions of the S3\ + \ connector. This option is deprecated in favor of a single glob." + airbyte_hidden: true + type: "string" + validation_policy: + title: "Validation Policy" + description: "The name of the validation policy that dictates sync\ + \ behavior when a record does not adhere to the stream schema." + default: "Emit Record" + enum: + - "Emit Record" + - "Skip Record" + - "Wait for Discover" + input_schema: + title: "Input Schema" + description: "The schema that will be used to validate records extracted\ + \ from the file. This will override the stream schema that is auto-detected\ + \ from incoming files." + type: "string" + primary_key: + title: "Primary Key" + description: "The column or columns (for a composite key) that serves\ + \ as the unique identifier of a record." + type: "string" + days_to_sync_if_history_is_full: + title: "Days To Sync If History Is Full" + description: "When the state history of the file store is full, syncs\ + \ will only read files that were last modified in the provided day\ + \ range." + default: 3 + type: "integer" + format: + title: "Format" + description: "The configuration options that are used to alter how\ + \ to read incoming files that deviate from the standard formatting." 
+ type: "object" + oneOf: + - title: "Avro Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "avro" + const: "avro" + type: "string" + enum: + - "avro" + double_as_string: + title: "Convert Double Fields to Strings" + description: "Whether to convert double fields to strings. This\ + \ is recommended if you have decimal numbers with a high degree\ + \ of precision because there can be a loss precision when\ + \ handling floating point numbers." + default: false + type: "boolean" + - title: "CSV Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "csv" + const: "csv" + type: "string" + enum: + - "csv" + delimiter: + title: "Delimiter" + description: "The character delimiting individual cells in the\ + \ CSV data. This may only be a 1-character string. For tab-delimited\ + \ data enter '\\t'." + default: "," + type: "string" + quote_char: + title: "Quote Character" + description: "The character used for quoting CSV values. To\ + \ disallow quoting, make this field blank." + default: "\"" + type: "string" + escape_char: + title: "Escape Character" + description: "The character used for escaping special characters.\ + \ To disallow escaping, leave this field blank." + type: "string" + encoding: + title: "Encoding" + description: "The character encoding of the CSV data. Leave\ + \ blank to default to UTF8. See list of python encodings for allowable\ + \ options." + default: "utf8" + type: "string" + double_quote: + title: "Double Quote" + description: "Whether two quotes in a quoted CSV value denote\ + \ a single quote in the data." + default: true + type: "boolean" + null_values: + title: "Null Values" + description: "A set of case-sensitive strings that should be\ + \ interpreted as null values. For example, if the value 'NA'\ + \ should be interpreted as null, enter 'NA' in this field." + default: [] + type: "array" + items: + type: "string" + uniqueItems: true + strings_can_be_null: + title: "Strings Can Be Null" + description: "Whether strings can be interpreted as null values.\ + \ If true, strings that match the null_values set will be\ + \ interpreted as null. If false, strings that match the null_values\ + \ set will be interpreted as the string itself." + default: true + type: "boolean" + skip_rows_before_header: + title: "Skip Rows Before Header" + description: "The number of rows to skip before the header row.\ + \ For example, if the header row is on the 3rd row, enter\ + \ 2 in this field." + default: 0 + type: "integer" + skip_rows_after_header: + title: "Skip Rows After Header" + description: "The number of rows to skip after the header row." + default: 0 + type: "integer" + header_definition: + title: "CSV Header Definition" + description: "How headers will be defined. `User Provided` assumes\ + \ the CSV does not have a header row and uses the headers\ + \ provided and `Autogenerated` assumes the CSV does not have\ + \ a header row and the CDK will generate headers using for\ + \ `f{i}` where `i` is the index starting from 0. Else, the\ + \ default behavior is to use the header from the CSV file.\ + \ If a user wants to autogenerate or provide column names\ + \ for a CSV having headers, they can skip rows." 
+ default: + header_definition_type: "From CSV" + oneOf: + - title: "From CSV" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "From CSV" + const: "From CSV" + type: "string" + enum: + - "From CSV" + - title: "Autogenerated" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "Autogenerated" + const: "Autogenerated" + type: "string" + enum: + - "Autogenerated" + - title: "User Provided" + type: "object" + properties: + header_definition_type: + title: "Header Definition Type" + default: "User Provided" + const: "User Provided" + type: "string" + enum: + - "User Provided" + column_names: + title: "Column Names" + description: "The column names that will be used while\ + \ emitting the CSV records" + type: "array" + items: + type: "string" + required: + - "column_names" + type: "object" + true_values: + title: "True Values" + description: "A set of case-sensitive strings that should be\ + \ interpreted as true values." + default: + - "y" + - "yes" + - "t" + - "true" + - "on" + - "1" + type: "array" + items: + type: "string" + uniqueItems: true + false_values: + title: "False Values" + description: "A set of case-sensitive strings that should be\ + \ interpreted as false values." + default: + - "n" + - "no" + - "f" + - "false" + - "off" + - "0" + type: "array" + items: + type: "string" + uniqueItems: true + inference_type: + title: "Inference Type" + description: "How to infer the types of the columns. If none,\ + \ inference default to strings." + default: "None" + airbyte_hidden: true + enum: + - "None" + - "Primitive Types Only" + - title: "Jsonl Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "jsonl" + const: "jsonl" + type: "string" + enum: + - "jsonl" + - title: "Parquet Format" + type: "object" + properties: + filetype: + title: "Filetype" + default: "parquet" + const: "parquet" + type: "string" + enum: + - "parquet" + decimal_as_float: + title: "Convert Decimal Fields to Floats" + description: "Whether to convert decimal fields to floats. There\ + \ is a loss of precision when converting decimals to floats,\ + \ so this is not recommended." + default: false + type: "boolean" + schemaless: + title: "Schemaless" + description: "When enabled, syncs will not validate or structure records\ + \ against the stream's schema." + default: false + type: "boolean" + required: + - "name" + - "file_type" + bucket: + title: "Bucket" + description: "Name of the S3 bucket where the file(s) exist." + order: 0 + type: "string" + aws_access_key_id: + title: "AWS Access Key ID" + description: "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 2 + type: "string" + aws_secret_access_key: + title: "AWS Secret Access Key" + description: "In order to access private Buckets stored on AWS S3, this\ + \ connector requires credentials with the proper permissions. If accessing\ + \ publicly available data, this field is not necessary." + airbyte_secret: true + order: 3 + type: "string" + endpoint: + title: "Endpoint" + description: "Endpoint to an S3 compatible service. Leave empty to use AWS." + default: "" + order: 4 + type: "string" dataset: title: "Output Stream Name" - description: "The name of the stream you would like this source to output.\ - \ Can contain letters, numbers, or underscores." 
+ description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use streams.name instead. The name of the stream you\ + \ would like this source to output. Can contain letters, numbers, or underscores." pattern: "^([A-Za-z0-9-_]+)$" - order: 0 + order: 100 type: "string" + airbyte_hidden: true path_pattern: title: "Pattern of files to replicate" - description: "A regular expression which tells the connector which files\ - \ to replicate. All files which match this pattern will be replicated.\ - \ Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR\ - \ and SPLIT flags are enabled). Use pattern ** to pick\ - \ up all files." + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use streams.globs instead. A regular expression which\ + \ tells the connector which files to replicate. All files which match\ + \ this pattern will be replicated. Use | to separate multiple patterns.\ + \ See this page to understand pattern syntax (GLOBSTAR and SPLIT\ + \ flags are enabled). Use pattern ** to pick up all files." examples: - "**" - "myFolder/myTableFiles/*.csv|myFolder/myOtherTableFiles/*.csv" - order: 10 + order: 110 type: "string" + airbyte_hidden: true format: title: "File Format" - description: "The format of the files you'd like to replicate" + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use streams.format instead. The format of the files\ + \ you'd like to replicate" default: "csv" - order: 20 + order: 120 type: "object" oneOf: - title: "CSV" @@ -28333,19 +28789,23 @@ components: default: 0 order: 2 type: "integer" + airbyte_hidden: true schema: title: "Manually enforced data schema" - description: "Optionally provide a schema to enforce, as a valid JSON string.\ - \ Ensure this is a mapping of { \"column\" : \"type\" },\ - \ where types are valid { \"column\" : \"type\" }, where types are valid\ + \ JSON Schema datatypes. Leave as {} to auto-infer\ \ the schema." default: "{}" examples: - "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"\ array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}" - order: 30 + order: 130 type: "string" + airbyte_hidden: true provider: title: "S3: Amazon Web Services" type: "object" @@ -28397,14 +28857,16 @@ components: pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" order: 5 type: "string" - required: - - "bucket" - order: 11 - description: "Use this to load files from S3 or S3-compatible services" + required: [] + order: 111 + description: "Deprecated and will be removed soon. Please do not use this\ + \ field anymore and use bucket, aws_access_key_id, aws_secret_access_key\ + \ and endpoint instead. Use this to load files from S3 or S3-compatible\ + \ services" + airbyte_hidden: true required: - - "dataset" - - "path_pattern" - - "provider" + - "streams" + - "bucket" source-azure-blob-storage: title: "AzureBlobStorage Source Spec" type: "object" @@ -28596,7 +29058,6 @@ components: pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" format: "date" source-zendesk-sunshine: - title: "Zendesk Sunshine Spec" type: "object" required: - "start_date" @@ -28604,109 +29065,111 @@ components: - "sourceType" properties: subdomain: - title: "Subdomain" - type: "string" - description: "The subdomain for your Zendesk Account." 
- start_date: - title: "Start Date" type: "string" - description: "The date from which you'd like to replicate data for Zendesk\ - \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." - pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" - examples: - - "2021-01-01T00:00:00Z" - credentials: - title: "Authorization Method" - type: "object" - oneOf: - - type: "object" - additionalProperties: true - title: "OAuth2.0" - required: - - "auth_method" - - "client_id" - - "client_secret" - - "access_token" - properties: - auth_method: - type: "string" - const: "oauth2.0" - enum: - - "oauth2.0" - default: "oauth2.0" - order: 0 - client_id: - type: "string" - title: "Client ID" - description: "The Client ID of your OAuth application." - airbyte_secret: true - client_secret: - type: "string" - title: "Client Secret" - description: "The Client Secret of your OAuth application." - airbyte_secret: true - access_token: - type: "string" - title: "Access Token" - description: "Long-term access Token for making authenticated requests." - airbyte_secret: true - - type: "object" - additionalProperties: true - title: "API Token" - required: - - "auth_method" - - "api_token" - - "email" - properties: - auth_method: - type: "string" - const: "api_token" - enum: - - "api_token" - default: "api_token" - order: 1 - api_token: - type: "string" - title: "API Token" - description: "API Token. See the docs for information on how to generate this key." - airbyte_secret: true - email: - type: "string" - title: "Email" - description: "The user email for your Zendesk account" - sourceType: - title: "zendesk-sunshine" - const: "zendesk-sunshine" - enum: - - "zendesk-sunshine" order: 0 - type: "string" - source-zendesk-sunshine-update: - title: "Zendesk Sunshine Spec" - type: "object" - required: - - "start_date" - - "subdomain" - properties: - subdomain: title: "Subdomain" - type: "string" description: "The subdomain for your Zendesk Account." start_date: - title: "Start Date" type: "string" + title: "Start date" + format: "date-time" description: "The date from which you'd like to replicate data for Zendesk\ \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" examples: - "2021-01-01T00:00:00Z" + order: 1 + credentials: + title: "Authorization Method" + type: "object" + oneOf: + - type: "object" + title: "OAuth2.0" + required: + - "auth_method" + - "client_id" + - "client_secret" + - "access_token" + properties: + auth_method: + type: "string" + const: "oauth2.0" + enum: + - "oauth2.0" + default: "oauth2.0" + order: 0 + client_id: + type: "string" + title: "Client ID" + description: "The Client ID of your OAuth application." + airbyte_secret: true + client_secret: + type: "string" + title: "Client Secret" + description: "The Client Secret of your OAuth application." + airbyte_secret: true + access_token: + type: "string" + title: "Access Token" + description: "Long-term access Token for making authenticated requests." + airbyte_secret: true + - type: "object" + title: "API Token" + required: + - "auth_method" + - "api_token" + - "email" + properties: + auth_method: + type: "string" + const: "api_token" + enum: + - "api_token" + default: "api_token" + order: 1 + api_token: + type: "string" + title: "API Token" + description: "API Token. See the docs for information on how to generate this key." 
+ airbyte_secret: true + email: + type: "string" + title: "Email" + description: "The user email for your Zendesk account" + sourceType: + title: "zendesk-sunshine" + const: "zendesk-sunshine" + enum: + - "zendesk-sunshine" + order: 0 + type: "string" + source-zendesk-sunshine-update: + type: "object" + required: + - "start_date" + - "subdomain" + properties: + subdomain: + type: "string" + order: 0 + title: "Subdomain" + description: "The subdomain for your Zendesk Account." + start_date: + type: "string" + title: "Start date" + format: "date-time" + description: "The date from which you'd like to replicate data for Zendesk\ + \ Sunshine API, in the format YYYY-MM-DDT00:00:00Z." + pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + examples: + - "2021-01-01T00:00:00Z" + order: 1 credentials: title: "Authorization Method" type: "object" oneOf: - type: "object" - additionalProperties: true title: "OAuth2.0" required: - "auth_method" @@ -28737,7 +29200,6 @@ components: description: "Long-term access Token for making authenticated requests." airbyte_secret: true - type: "object" - additionalProperties: true title: "API Token" required: - "auth_method" @@ -29107,234 +29569,6 @@ components: \ OAuth Token Tools. See the docs to obtain yours." airbyte_secret: true - source-openweather: - title: "Open Weather Spec" - type: "object" - required: - - "appid" - - "lat" - - "lon" - - "sourceType" - properties: - lat: - title: "Latitude" - type: "string" - pattern: "^[-]?\\d{1,2}(\\.\\d+)?$" - examples: - - "45.7603" - - "-21.249107858038816" - description: "Latitude for which you want to get weather condition from.\ - \ (min -90, max 90)" - lon: - title: "Longitude" - type: "string" - pattern: "^[-]?\\d{1,3}(\\.\\d+)?$" - examples: - - "4.835659" - - "-70.39482074115321" - description: "Longitude for which you want to get weather condition from.\ - \ (min -180, max 180)" - appid: - title: "App ID" - type: "string" - description: "Your OpenWeather API Key. See here. The key is case sensitive." - airbyte_secret: true - units: - title: "Units" - type: "string" - description: "Units of measurement. standard, metric and imperial units\ - \ are available. If you do not use the units parameter, standard units\ - \ will be applied by default." - enum: - - "standard" - - "metric" - - "imperial" - examples: - - "standard" - - "metric" - - "imperial" - lang: - title: "Language" - type: "string" - description: "You can use lang parameter to get the output in your language.\ - \ The contents of the description field will be translated. See here for the list\ - \ of supported languages." 
- enum: - - "af" - - "al" - - "ar" - - "az" - - "bg" - - "ca" - - "cz" - - "da" - - "de" - - "el" - - "en" - - "eu" - - "fa" - - "fi" - - "fr" - - "gl" - - "he" - - "hi" - - "hr" - - "hu" - - "id" - - "it" - - "ja" - - "kr" - - "la" - - "lt" - - "mk" - - "no" - - "nl" - - "pl" - - "pt" - - "pt_br" - - "ro" - - "ru" - - "sv" - - "se" - - "sk" - - "sl" - - "sp" - - "es" - - "sr" - - "th" - - "tr" - - "ua" - - "uk" - - "vi" - - "zh_cn" - - "zh_tw" - - "zu" - examples: - - "en" - - "fr" - - "pt_br" - - "uk" - - "zh_cn" - - "zh_tw" - sourceType: - title: "openweather" - const: "openweather" - enum: - - "openweather" - order: 0 - type: "string" - source-openweather-update: - title: "Open Weather Spec" - type: "object" - required: - - "appid" - - "lat" - - "lon" - properties: - lat: - title: "Latitude" - type: "string" - pattern: "^[-]?\\d{1,2}(\\.\\d+)?$" - examples: - - "45.7603" - - "-21.249107858038816" - description: "Latitude for which you want to get weather condition from.\ - \ (min -90, max 90)" - lon: - title: "Longitude" - type: "string" - pattern: "^[-]?\\d{1,3}(\\.\\d+)?$" - examples: - - "4.835659" - - "-70.39482074115321" - description: "Longitude for which you want to get weather condition from.\ - \ (min -180, max 180)" - appid: - title: "App ID" - type: "string" - description: "Your OpenWeather API Key. See here. The key is case sensitive." - airbyte_secret: true - units: - title: "Units" - type: "string" - description: "Units of measurement. standard, metric and imperial units\ - \ are available. If you do not use the units parameter, standard units\ - \ will be applied by default." - enum: - - "standard" - - "metric" - - "imperial" - examples: - - "standard" - - "metric" - - "imperial" - lang: - title: "Language" - type: "string" - description: "You can use lang parameter to get the output in your language.\ - \ The contents of the description field will be translated. See here for the list\ - \ of supported languages." - enum: - - "af" - - "al" - - "ar" - - "az" - - "bg" - - "ca" - - "cz" - - "da" - - "de" - - "el" - - "en" - - "eu" - - "fa" - - "fi" - - "fr" - - "gl" - - "he" - - "hi" - - "hr" - - "hu" - - "id" - - "it" - - "ja" - - "kr" - - "la" - - "lt" - - "mk" - - "no" - - "nl" - - "pl" - - "pt" - - "pt_br" - - "ro" - - "ru" - - "sv" - - "se" - - "sk" - - "sl" - - "sp" - - "es" - - "sr" - - "th" - - "tr" - - "ua" - - "uk" - - "vi" - - "zh_cn" - - "zh_tw" - - "zu" - examples: - - "en" - - "fr" - - "pt_br" - - "uk" - - "zh_cn" - - "zh_tw" source-whisky-hunter: title: "Whisky Hunter Spec" type: "object" @@ -30577,6 +30811,8 @@ components: type: "string" title: "Authentication Method" const: "oauth2_access_token" + examples: + - "oauth2_access_token" order: 0 enum: - "oauth2_access_token" @@ -30588,6 +30824,15 @@ components: \ knowns as scopes." type: "string" airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false sourceType: title: "auth0" const: "auth0" @@ -30663,6 +30908,8 @@ components: type: "string" title: "Authentication Method" const: "oauth2_access_token" + examples: + - "oauth2_access_token" order: 0 enum: - "oauth2_access_token" @@ -30674,6 +30921,15 @@ components: \ knowns as scopes." 
type: "string" airbyte_secret: true + start_date: + type: "string" + title: "Start Date" + description: "UTC date and time in the format 2017-01-25T00:00:00Z. Any\ + \ data before this date will not be replicated." + examples: + - "2023-08-05T00:43:59.244Z" + default: "2023-08-05T00:43:59.244Z" + airbyte_secret: false source-linnworks: title: "Linnworks Spec" type: "object" @@ -31840,6 +32096,7 @@ components: description: "Configures how data is extracted from the database." order: 8 default: "CDC" + display_type: "radio" oneOf: - title: "Read Changes using Binary Log (CDC)" description: "Recommended - Incrementally reads new inserts, updates,\ @@ -32186,6 +32443,7 @@ components: description: "Configures how data is extracted from the database." order: 8 default: "CDC" + display_type: "radio" oneOf: - title: "Read Changes using Binary Log (CDC)" description: "Recommended - Incrementally reads new inserts, updates,\ @@ -32498,9 +32756,18 @@ components: title: "Apify Dataset Spec" type: "object" required: - - "datasetId" + - "token" - "sourceType" properties: + token: + title: "Personal API tokens" + description: "Your application's Client Secret. You can find this value\ + \ on the console\ + \ integrations tab after you login." + type: "string" + examples: + - "Personal API tokens" + airbyte_secret: true datasetId: type: "string" title: "Dataset ID" @@ -32522,8 +32789,17 @@ components: title: "Apify Dataset Spec" type: "object" required: - - "datasetId" + - "token" properties: + token: + title: "Personal API tokens" + description: "Your application's Client Secret. You can find this value\ + \ on the console\ + \ integrations tab after you login." + type: "string" + examples: + - "Personal API tokens" + airbyte_secret: true datasetId: type: "string" title: "Dataset ID" @@ -34264,7 +34540,6 @@ components: required: - "client_secret" - "account_id" - - "start_date" - "sourceType" properties: account_id: @@ -34286,6 +34561,7 @@ components: pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" examples: - "2017-01-25T00:00:00Z" format: "date-time" @@ -34297,7 +34573,10 @@ components: minimum: 0 description: "When set, the connector will always re-export data from the\ \ past N days, where N is the value set here. This is useful if your data\ - \ is frequently updated after creation. More info here" order: 3 slice_range: @@ -34330,7 +34609,6 @@ components: required: - "client_secret" - "account_id" - - "start_date" properties: account_id: type: "string" @@ -34351,6 +34629,7 @@ components: pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" description: "UTC date and time in the format 2017-01-25T00:00:00Z. Only\ \ data generated after this date will be replicated." + default: "2017-01-25T00:00:00Z" examples: - "2017-01-25T00:00:00Z" format: "date-time" @@ -34362,7 +34641,10 @@ components: minimum: 0 description: "When set, the connector will always re-export data from the\ \ past N days, where N is the value set here. This is useful if your data\ - \ is frequently updated after creation. More info here" order: 3 slice_range: @@ -34468,16 +34750,6 @@ components: \ the spreadsheet, then click 'Copy link'." 
examples: - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" - row_batch_size: - type: "integer" - title: "Row Batch Size" - description: "The number of rows fetched when making a Google Sheet API\ - \ call. Defaults to 200." - default: 200 - examples: - - 50 - - 100 - - 200 names_conversion: type: "boolean" title: "Convert Column Names to SQL-Compliant Format" @@ -34566,16 +34838,6 @@ components: \ the spreadsheet, then click 'Copy link'." examples: - "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" - row_batch_size: - type: "integer" - title: "Row Batch Size" - description: "The number of rows fetched when making a Google Sheet API\ - \ call. Defaults to 200." - default: 200 - examples: - - 50 - - 100 - - 200 names_conversion: type: "boolean" title: "Convert Column Names to SQL-Compliant Format" @@ -35285,6 +35547,19 @@ components: description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." examples: - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: "Set lower value in case of failing long running sync of events\ + \ stream." + examples: + - 30 + - 10 + - 5 sourceType: title: "posthog" const: "posthog" @@ -35321,6 +35596,19 @@ components: description: "Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com)." examples: - "https://posthog.example.com" + events_time_step: + type: "integer" + order: 3 + default: 30 + minimum: 1 + maximum: 91 + title: "Events stream slice step size (in days)" + description: "Set lower value in case of failing long running sync of events\ + \ stream." + examples: + - 30 + - 10 + - 5 source-getlago: title: "Getlago Spec" type: "object" @@ -36015,19 +36303,25 @@ components: client_id: type: "string" title: "Client ID" - description: "The client ID of the LinkedIn Ads developer application." + description: "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." airbyte_secret: true client_secret: type: "string" - title: "Client secret" - description: "The client secret the LinkedIn Ads developer application." + title: "Client Secret" + description: "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." airbyte_secret: true refresh_token: type: "string" - title: "Refresh token" - description: "The key to refresh the expired access token." + title: "Refresh Token" + description: "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." airbyte_secret: true - - title: "Access token" + - title: "Access Token" type: "object" required: - "access_token" @@ -36039,16 +36333,17 @@ components: - "access_token" access_token: type: "string" - title: "Access token" - description: "The token value generated using the authentication code.\ - \ See the docs to obtain yours." + title: "Access Token" + description: "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." airbyte_secret: true start_date: type: "string" - title: "Start date" + title: "Start Date" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - description: "UTC date in the format 2020-09-17. Any data before this date\ + pattern_descriptor: "YYYY-MM-DD" + description: "UTC date in the format YYYY-MM-DD. 
Any data before this date\ \ will not be replicated." examples: - "2021-05-17" @@ -36056,15 +36351,17 @@ components: account_ids: title: "Account IDs" type: "array" - description: "Specify the account IDs separated by a space, to pull the\ - \ data from. Leave empty, if you want to pull the data from all associated\ - \ accounts. See the LinkedIn Ads docs for more info." + description: "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." items: type: "integer" + examples: + - "123456789" default: [] ad_analytics_reports: - title: "Custom Ad Analytics reports" + title: "Custom Ad Analytics Reports" type: "array" items: type: "object" @@ -36076,12 +36373,15 @@ components: - "time_granularity" properties: name: - title: "Name" - description: "The name for the report" + title: "Report Name" + description: "The name for the custom report." type: "string" pivot_by: - title: "Pivot By" - description: "Select value from list to pivot by" + title: "Pivot Category" + description: "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." type: "string" enum: - "COMPANY" @@ -36106,11 +36406,13 @@ components: - "PLACEMENT_NAME" - "IMPRESSION_DEVICE_TYPE" time_granularity: - title: "Time granularity" - description: "Set time granularity for report: \nALL - Results grouped\ - \ into a single result across the entire time range of the report.\n\ - DAILY - Results grouped by day.\nMONTHLY - Results grouped by month.\n\ - YEARLY - Results grouped by year." + title: "Time Granularity" + description: "Choose how to group the data in your report by time.\ + \ The options are:
- 'ALL': A single result summarizing the entire\ + \ time range.<br>- 'DAILY': Group results by each day.<br>- 'MONTHLY':\ + \ Group results by each month.<br>- 'YEARLY': Group results by each\ + \ year.<br>
Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." type: "string" enum: - "ALL" @@ -36150,19 +36452,25 @@ components: client_id: type: "string" title: "Client ID" - description: "The client ID of the LinkedIn Ads developer application." + description: "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." airbyte_secret: true client_secret: type: "string" - title: "Client secret" - description: "The client secret the LinkedIn Ads developer application." + title: "Client Secret" + description: "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." airbyte_secret: true refresh_token: type: "string" - title: "Refresh token" - description: "The key to refresh the expired access token." + title: "Refresh Token" + description: "The key to refresh the expired access token. Refer to\ + \ our documentation\ + \ for more information." airbyte_secret: true - - title: "Access token" + - title: "Access Token" type: "object" required: - "access_token" @@ -36174,16 +36482,17 @@ components: - "access_token" access_token: type: "string" - title: "Access token" - description: "The token value generated using the authentication code.\ - \ See the docs to obtain yours." + title: "Access Token" + description: "The access token generated for your developer application.\ + \ Refer to our documentation\ + \ for more information." airbyte_secret: true start_date: type: "string" - title: "Start date" + title: "Start Date" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}$" - description: "UTC date in the format 2020-09-17. Any data before this date\ + pattern_descriptor: "YYYY-MM-DD" + description: "UTC date in the format YYYY-MM-DD. Any data before this date\ \ will not be replicated." examples: - "2021-05-17" @@ -36191,15 +36500,17 @@ components: account_ids: title: "Account IDs" type: "array" - description: "Specify the account IDs separated by a space, to pull the\ - \ data from. Leave empty, if you want to pull the data from all associated\ - \ accounts. See the LinkedIn Ads docs for more info." + description: "Specify the account IDs to pull data from, separated by a\ + \ space. Leave this field empty if you want to pull the data from all\ + \ accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs." items: type: "integer" + examples: + - "123456789" default: [] ad_analytics_reports: - title: "Custom Ad Analytics reports" + title: "Custom Ad Analytics Reports" type: "array" items: type: "object" @@ -36211,12 +36522,15 @@ components: - "time_granularity" properties: name: - title: "Name" - description: "The name for the report" + title: "Report Name" + description: "The name for the custom report." type: "string" pivot_by: - title: "Pivot By" - description: "Select value from list to pivot by" + title: "Pivot Category" + description: "Choose a category to pivot your analytics report around.\ + \ This selection will organize your data based on the chosen attribute,\ + \ allowing you to analyze trends and performance from different\ + \ perspectives." 
type: "string" enum: - "COMPANY" @@ -36241,11 +36555,13 @@ components: - "PLACEMENT_NAME" - "IMPRESSION_DEVICE_TYPE" time_granularity: - title: "Time granularity" - description: "Set time granularity for report: \nALL - Results grouped\ - \ into a single result across the entire time range of the report.\n\ - DAILY - Results grouped by day.\nMONTHLY - Results grouped by month.\n\ - YEARLY - Results grouped by year." + title: "Time Granularity" + description: "Choose how to group the data in your report by time.\ + \ The options are:
- 'ALL': A single result summarizing the entire\ + \ time range.<br>- 'DAILY': Group results by each day.<br>- 'MONTHLY':\ + \ Group results by each month.<br>- 'YEARLY': Group results by each\ + \ year.<br>
Selecting a time grouping helps you analyze trends and\ + \ patterns over different time periods." type: "string" enum: - "ALL" @@ -37330,7 +37646,6 @@ components: title: "Source Zendesk Support Spec" type: "object" required: - - "start_date" - "subdomain" - "sourceType" properties: @@ -37343,19 +37658,23 @@ components: examples: - "2020-10-15T00:00:00Z" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" format: "date-time" + order: 2 subdomain: type: "string" title: "Subdomain" description: "This is your unique Zendesk subdomain that can be found in\ \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ \ MY_SUBDOMAIN is the value of your subdomain." + order: 0 credentials: title: "Authentication" type: "object" description: "Zendesk allows two authentication methods. We recommend using\ \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ \ Source users." + order: 1 oneOf: - title: "OAuth2.0" type: "object" @@ -37429,7 +37748,6 @@ components: title: "Source Zendesk Support Spec" type: "object" required: - - "start_date" - "subdomain" properties: start_date: @@ -37441,19 +37759,23 @@ components: examples: - "2020-10-15T00:00:00Z" pattern: "^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}Z$" + pattern_descriptor: "YYYY-MM-DDTHH:mm:ssZ" format: "date-time" + order: 2 subdomain: type: "string" title: "Subdomain" description: "This is your unique Zendesk subdomain that can be found in\ \ your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/,\ \ MY_SUBDOMAIN is the value of your subdomain." + order: 0 credentials: title: "Authentication" type: "object" description: "Zendesk allows two authentication methods. We recommend using\ \ `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open\ \ Source users." + order: 1 oneOf: - title: "OAuth2.0" type: "object" @@ -39120,6 +39442,16 @@ components: \ href=\"https://developers.intercom.com/building-apps/docs/authentication-types#how-to-get-your-access-token\"\ >Intercom docs for more information." airbyte_secret: true + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true sourceType: title: "intercom" const: "intercom" @@ -39150,6 +39482,16 @@ components: \ href=\"https://developers.intercom.com/building-apps/docs/authentication-types#how-to-get-your-access-token\"\ >Intercom docs for more information." airbyte_secret: true + client_id: + title: "Client Id" + type: "string" + description: "Client Id for your Intercom application." + airbyte_secret: true + client_secret: + title: "Client Secret" + type: "string" + description: "Client Secret for your Intercom application." + airbyte_secret: true source-rki-covid: title: "RKI Covid Spec" type: "object" @@ -39257,7 +39599,7 @@ components: airbyte_secret: true order: 0 since: - title: "Date Since" + title: "Replication Start Date" type: "string" description: "The date from which you'd like to replicate the data" examples: @@ -39287,7 +39629,7 @@ components: airbyte_secret: true order: 0 since: - title: "Date Since" + title: "Replication Start Date" type: "string" description: "The date from which you'd like to replicate the data" examples: @@ -39687,8 +40029,8 @@ components: properties: api_key: type: "string" - title: "API key" - description: "Lemlist API key." 
+ title": "API key" + description: "Lemlist API key," airbyte_secret: true sourceType: title: "lemlist" @@ -39705,8 +40047,8 @@ components: properties: api_key: type: "string" - title: "API key" - description: "Lemlist API key." + title": "API key" + description: "Lemlist API key," airbyte_secret: true source-pexels-api: title: "Pexel API Spec" @@ -43268,11 +43610,22 @@ components: profiles: title: "Profile IDs" description: "Profile IDs you want to fetch data for. See docs for more details." + >docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." order: 6 type: "array" items: type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" state_filter: title: "State Filter" description: "Reflects the state of the Display, Product, and Brand Campaign\ @@ -43286,7 +43639,7 @@ components: - "archived" type: "array" uniqueItems: true - order: 7 + order: 8 look_back_window: title: "Look Back Window" description: "The amount of days to go back in time to get the updated data\ @@ -43296,7 +43649,7 @@ components: - 10 type: "integer" default: 3 - order: 8 + order: 9 report_record_types: title: "Report Record Types" description: "Optional configuration which accepts an array of string of\ @@ -43317,7 +43670,7 @@ components: - "targets" type: "array" uniqueItems: true - order: 9 + order: 10 sourceType: title: "amazon-ads" const: "amazon-ads" @@ -43386,11 +43739,22 @@ components: profiles: title: "Profile IDs" description: "Profile IDs you want to fetch data for. See docs for more details." + >docs for more details. Note: If Marketplace IDs are also selected,\ + \ profiles will be selected if they match the Profile ID OR the Marketplace\ + \ ID." order: 6 type: "array" items: type: "integer" + marketplace_ids: + title: "Marketplace IDs" + description: "Marketplace IDs you want to fetch data for. Note: If Profile\ + \ IDs are also selected, profiles will be selected if they match the Profile\ + \ ID OR the Marketplace ID." + order: 7 + type: "array" + items: + type: "string" state_filter: title: "State Filter" description: "Reflects the state of the Display, Product, and Brand Campaign\ @@ -43404,7 +43768,7 @@ components: - "archived" type: "array" uniqueItems: true - order: 7 + order: 8 look_back_window: title: "Look Back Window" description: "The amount of days to go back in time to get the updated data\ @@ -43414,7 +43778,7 @@ components: - 10 type: "integer" default: 3 - order: 8 + order: 9 report_record_types: title: "Report Record Types" description: "Optional configuration which accepts an array of string of\ @@ -43435,7 +43799,7 @@ components: - "targets" type: "array" uniqueItems: true - order: 9 + order: 10 required: - "client_id" - "client_secret" @@ -46169,18 +46533,67 @@ components: order: 7 replication_method: type: "object" - title: "Replication Method" - description: "The replication method used for extracting data from the database.\ - \ STANDARD replication requires no setup on the DB side but will not be\ - \ able to represent deletions incrementally. CDC uses {TBC} to detect\ - \ inserts, updates, and deletes. This needs to be configured on the source\ - \ database itself." 
- default: "STANDARD" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" order: 8 oneOf: - - title: "Standard" - description: "Standard replication requires no setup on the DB side but\ - \ will not be able to represent deletions incrementally." + - title: "Read Changes using Change Data Capture (CDC)" + description: "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." + required: + - "method" + properties: + method: + type: "string" + const: "CDC" + order: 0 + enum: + - "CDC" + data_to_sync: + title: "Data to Sync" + type: "string" + default: "Existing and New" + enum: + - "Existing and New" + - "New Changes Only" + description: "What data should be synced under the CDC. \"Existing\ + \ and New\" will read existing data as a snapshot, and sync new\ + \ changes through CDC. \"New Changes Only\" will skip the initial\ + \ snapshot, and only sync new changes through CDC." + order: 1 + snapshot_isolation: + title: "Initial Snapshot Isolation Level" + type: "string" + default: "Snapshot" + enum: + - "Snapshot" + - "Read Committed" + description: "Existing data in the database are synced through an\ + \ initial snapshot. This parameter controls the isolation level\ + \ that will be used during the initial snapshotting. If you choose\ + \ the \"Snapshot\" level, you must enable the snapshot isolation mode on the database." + order: 2 + initial_waiting_seconds: + type: "integer" + title: "Initial Waiting Time in Seconds (Advanced)" + description: "The amount of time the connector will wait when it launches\ + \ to determine if there is new data to sync or not. Defaults to\ + \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ + \ initial waiting time." + default: 300 + min: 120 + max: 1200 + order: 3 + - title: "Scan Changes with User Defined Cursor" + description: "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." required: - "method" properties: @@ -46190,9 +46603,227 @@ components: order: 0 enum: - "STANDARD" - - title: "Logical Replication (CDC)" - description: "CDC uses {TBC} to detect inserts, updates, and deletes.\ - \ This needs to be configured on the source database itself." + tunnel_method: + type: "object" + title: "SSH Tunnel Method" + description: "Whether to initiate an SSH tunnel before connecting to the\ + \ database, and if so, which kind of authentication to use." + oneOf: + - title: "No Tunnel" + required: + - "tunnel_method" + properties: + tunnel_method: + description: "No ssh tunnel needed to connect to database" + type: "string" + const: "NO_TUNNEL" + order: 0 + enum: + - "NO_TUNNEL" + - title: "SSH Key Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "ssh_key" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and ssh key" + type: "string" + const: "SSH_KEY_AUTH" + order: 0 + enum: + - "SSH_KEY_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." 
+ type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host." + type: "string" + order: 3 + ssh_key: + title: "SSH Private Key" + description: "OS-level user account ssh key credentials in RSA PEM\ + \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" + type: "string" + airbyte_secret: true + multiline: true + order: 4 + - title: "Password Authentication" + required: + - "tunnel_method" + - "tunnel_host" + - "tunnel_port" + - "tunnel_user" + - "tunnel_user_password" + properties: + tunnel_method: + description: "Connect through a jump server tunnel host using username\ + \ and password authentication" + type: "string" + const: "SSH_PASSWORD_AUTH" + order: 0 + enum: + - "SSH_PASSWORD_AUTH" + tunnel_host: + title: "SSH Tunnel Jump Server Host" + description: "Hostname of the jump server host that allows inbound\ + \ ssh tunnel." + type: "string" + order: 1 + tunnel_port: + title: "SSH Connection Port" + description: "Port on the proxy/jump server that accepts inbound ssh\ + \ connections." + type: "integer" + minimum: 0 + maximum: 65536 + default: 22 + examples: + - "22" + order: 2 + tunnel_user: + title: "SSH Login Username" + description: "OS-level username for logging into the jump server host" + type: "string" + order: 3 + tunnel_user_password: + title: "Password" + description: "OS-level password for logging into the jump server host" + type: "string" + airbyte_secret: true + order: 4 + sourceType: + title: "mssql" + const: "mssql" + enum: + - "mssql" + order: 0 + type: "string" + source-mssql-update: + title: "MSSQL Source Spec" + type: "object" + required: + - "host" + - "port" + - "database" + - "username" + properties: + host: + description: "The hostname of the database." + title: "Host" + type: "string" + order: 0 + port: + description: "The port of the database." + title: "Port" + type: "integer" + minimum: 0 + maximum: 65536 + examples: + - "1433" + order: 1 + database: + description: "The name of the database." + title: "Database" + type: "string" + examples: + - "master" + order: 2 + schemas: + title: "Schemas" + description: "The list of schemas to sync from. Defaults to user. Case sensitive." + type: "array" + items: + type: "string" + minItems: 0 + uniqueItems: true + default: + - "dbo" + order: 3 + username: + description: "The username which is used to access the database." + title: "Username" + type: "string" + order: 4 + password: + description: "The password associated with the username." + title: "Password" + type: "string" + airbyte_secret: true + order: 5 + jdbc_url_params: + title: "JDBC URL Params" + description: "Additional properties to pass to the JDBC URL string when\ + \ connecting to the database formatted as 'key=value' pairs separated\ + \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." + type: "string" + order: 6 + ssl_method: + title: "SSL Method" + type: "object" + description: "The encryption method which is used when communicating with\ + \ the database." 
+ order: 7 + oneOf: + - title: "Encrypted (trust server certificate)" + description: "Use the certificate provided by the server without verification.\ + \ (For testing purposes only!)" + required: + - "ssl_method" + properties: + ssl_method: + type: "string" + const: "encrypted_trust_server_certificate" + enum: + - "encrypted_trust_server_certificate" + - title: "Encrypted (verify certificate)" + description: "Verify and use the certificate provided by the server." + required: + - "ssl_method" + - "trustStoreName" + - "trustStorePassword" + properties: + ssl_method: + type: "string" + const: "encrypted_verify_certificate" + enum: + - "encrypted_verify_certificate" + hostNameInCertificate: + title: "Host Name In Certificate" + type: "string" + description: "Specifies the host name of the server. The value of\ + \ this property must match the subject property of the certificate." + order: 7 + replication_method: + type: "object" + title: "Update Method" + description: "Configures how data is extracted from the database." + default: "CDC" + display_type: "radio" + order: 8 + oneOf: + - title: "Read Changes using Change Data Capture (CDC)" + description: "Recommended - Incrementally reads new inserts, updates,\ + \ and deletes using the SQL Server's change data capture feature. This must be enabled on your database." required: - "method" properties: @@ -46239,229 +46870,11 @@ components: min: 120 max: 1200 order: 3 - tunnel_method: - type: "object" - title: "SSH Tunnel Method" - description: "Whether to initiate an SSH tunnel before connecting to the\ - \ database, and if so, which kind of authentication to use." - oneOf: - - title: "No Tunnel" - required: - - "tunnel_method" - properties: - tunnel_method: - description: "No ssh tunnel needed to connect to database" - type: "string" - const: "NO_TUNNEL" - order: 0 - enum: - - "NO_TUNNEL" - - title: "SSH Key Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "ssh_key" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and ssh key" - type: "string" - const: "SSH_KEY_AUTH" - order: 0 - enum: - - "SSH_KEY_AUTH" - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." - type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host." - type: "string" - order: 3 - ssh_key: - title: "SSH Private Key" - description: "OS-level user account ssh key credentials in RSA PEM\ - \ format ( created with ssh-keygen -t rsa -m PEM -f myuser_rsa )" - type: "string" - airbyte_secret: true - multiline: true - order: 4 - - title: "Password Authentication" - required: - - "tunnel_method" - - "tunnel_host" - - "tunnel_port" - - "tunnel_user" - - "tunnel_user_password" - properties: - tunnel_method: - description: "Connect through a jump server tunnel host using username\ - \ and password authentication" - type: "string" - const: "SSH_PASSWORD_AUTH" - order: 0 - enum: - - "SSH_PASSWORD_AUTH" - tunnel_host: - title: "SSH Tunnel Jump Server Host" - description: "Hostname of the jump server host that allows inbound\ - \ ssh tunnel." 
- type: "string" - order: 1 - tunnel_port: - title: "SSH Connection Port" - description: "Port on the proxy/jump server that accepts inbound ssh\ - \ connections." - type: "integer" - minimum: 0 - maximum: 65536 - default: 22 - examples: - - "22" - order: 2 - tunnel_user: - title: "SSH Login Username" - description: "OS-level username for logging into the jump server host" - type: "string" - order: 3 - tunnel_user_password: - title: "Password" - description: "OS-level password for logging into the jump server host" - type: "string" - airbyte_secret: true - order: 4 - sourceType: - title: "mssql" - const: "mssql" - enum: - - "mssql" - order: 0 - type: "string" - source-mssql-update: - title: "MSSQL Source Spec" - type: "object" - required: - - "host" - - "port" - - "database" - - "username" - properties: - host: - description: "The hostname of the database." - title: "Host" - type: "string" - order: 0 - port: - description: "The port of the database." - title: "Port" - type: "integer" - minimum: 0 - maximum: 65536 - examples: - - "1433" - order: 1 - database: - description: "The name of the database." - title: "Database" - type: "string" - examples: - - "master" - order: 2 - schemas: - title: "Schemas" - description: "The list of schemas to sync from. Defaults to user. Case sensitive." - type: "array" - items: - type: "string" - minItems: 0 - uniqueItems: true - default: - - "dbo" - order: 3 - username: - description: "The username which is used to access the database." - title: "Username" - type: "string" - order: 4 - password: - description: "The password associated with the username." - title: "Password" - type: "string" - airbyte_secret: true - order: 5 - jdbc_url_params: - title: "JDBC URL Params" - description: "Additional properties to pass to the JDBC URL string when\ - \ connecting to the database formatted as 'key=value' pairs separated\ - \ by the symbol '&'. (example: key1=value1&key2=value2&key3=value3)." - type: "string" - order: 6 - ssl_method: - title: "SSL Method" - type: "object" - description: "The encryption method which is used when communicating with\ - \ the database." - order: 7 - oneOf: - - title: "Encrypted (trust server certificate)" - description: "Use the certificate provided by the server without verification.\ - \ (For testing purposes only!)" - required: - - "ssl_method" - properties: - ssl_method: - type: "string" - const: "encrypted_trust_server_certificate" - enum: - - "encrypted_trust_server_certificate" - - title: "Encrypted (verify certificate)" - description: "Verify and use the certificate provided by the server." - required: - - "ssl_method" - - "trustStoreName" - - "trustStorePassword" - properties: - ssl_method: - type: "string" - const: "encrypted_verify_certificate" - enum: - - "encrypted_verify_certificate" - hostNameInCertificate: - title: "Host Name In Certificate" - type: "string" - description: "Specifies the host name of the server. The value of\ - \ this property must match the subject property of the certificate." - order: 7 - replication_method: - type: "object" - title: "Replication Method" - description: "The replication method used for extracting data from the database.\ - \ STANDARD replication requires no setup on the DB side but will not be\ - \ able to represent deletions incrementally. CDC uses {TBC} to detect\ - \ inserts, updates, and deletes. This needs to be configured on the source\ - \ database itself." 
- default: "STANDARD" - order: 8 - oneOf: - - title: "Standard" - description: "Standard replication requires no setup on the DB side but\ - \ will not be able to represent deletions incrementally." + - title: "Scan Changes with User Defined Cursor" + description: "Incrementally detects new inserts and updates using the\ + \ cursor column chosen when configuring a connection (e.g. created_at,\ + \ updated_at)." required: - "method" properties: @@ -46471,55 +46884,6 @@ components: order: 0 enum: - "STANDARD" - - title: "Logical Replication (CDC)" - description: "CDC uses {TBC} to detect inserts, updates, and deletes.\ - \ This needs to be configured on the source database itself." - required: - - "method" - properties: - method: - type: "string" - const: "CDC" - order: 0 - enum: - - "CDC" - data_to_sync: - title: "Data to Sync" - type: "string" - default: "Existing and New" - enum: - - "Existing and New" - - "New Changes Only" - description: "What data should be synced under the CDC. \"Existing\ - \ and New\" will read existing data as a snapshot, and sync new\ - \ changes through CDC. \"New Changes Only\" will skip the initial\ - \ snapshot, and only sync new changes through CDC." - order: 1 - snapshot_isolation: - title: "Initial Snapshot Isolation Level" - type: "string" - default: "Snapshot" - enum: - - "Snapshot" - - "Read Committed" - description: "Existing data in the database are synced through an\ - \ initial snapshot. This parameter controls the isolation level\ - \ that will be used during the initial snapshotting. If you choose\ - \ the \"Snapshot\" level, you must enable the snapshot isolation mode on the database." - order: 2 - initial_waiting_seconds: - type: "integer" - title: "Initial Waiting Time in Seconds (Advanced)" - description: "The amount of time the connector will wait when it launches\ - \ to determine if there is new data to sync or not. Defaults to\ - \ 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about\ - \ initial waiting time." - default: 300 - min: 120 - max: 1200 - order: 3 tunnel_method: type: "object" title: "SSH Tunnel Method" @@ -48248,7 +48612,7 @@ components: title: "Client Secret" description: "The Client Secret of your OAuth application." airbyte_secret: true - title: "Zendesk Sunshine Spec" + title: null snapchat-marketing: properties: client_id: @@ -48454,12 +48818,16 @@ components: client_id: type: "string" title: "Client ID" - description: "The client ID of the LinkedIn Ads developer application." + description: "The client ID of your developer application. Refer to\ + \ our documentation\ + \ for more information." airbyte_secret: true client_secret: type: "string" - title: "Client secret" - description: "The client secret the LinkedIn Ads developer application." + title: "Client Secret" + description: "The client secret of your developer application. Refer\ + \ to our documentation\ + \ for more information." airbyte_secret: true title: "Linkedin Ads Spec" pinterest: @@ -48564,11 +48932,15 @@ components: intercom: properties: client_id: + title: "Client Id" type: "string" - name: "client_id" + description: "Client Id for your Intercom application." + airbyte_secret: true client_secret: + title: "Client Secret" type: "string" - name: "client_secret" + description: "Client Secret for your Intercom application." 
+ airbyte_secret: true title: "Source Intercom Spec" typeform: properties: @@ -52782,18 +53154,11 @@ components: title: "JDBC URL Params" type: "string" order: 7 - use_1s1t_format: - type: "boolean" - description: "(Beta) Use Destinations V2. Contact Airbyte Support to participate\ - \ in the beta program." - title: "Use Destinations V2 (Early Access)" - order: 10 raw_data_schema: type: "string" - description: "(Beta) The schema to write raw tables into" - title: "Destinations V2 Raw Table Schema (Early Access)" - order: 11 + description: "The schema to write raw tables into" + title: "Destinations V2 Raw Table Schema" + order: 10 destinationType: title: "snowflake" const: "snowflake" @@ -52955,18 +53320,11 @@ components: title: "JDBC URL Params" type: "string" order: 7 - use_1s1t_format: - type: "boolean" - description: "(Beta) Use Destinations V2. Contact Airbyte Support to participate\ - \ in the beta program." - title: "Use Destinations V2 (Early Access)" - order: 10 raw_data_schema: type: "string" - description: "(Beta) The schema to write raw tables into" - title: "Destinations V2 Raw Table Schema (Early Access)" - order: 11 + description: "The schema to write raw tables into" + title: "Destinations V2 Raw Table Schema" + order: 10 destination-databricks: title: "Databricks Lakehouse Destination Spec" type: "object" @@ -54207,6 +54565,536 @@ components: type: "boolean" default: false order: 12 + destination-milvus: + title: "Milvus Destination Config" + type: "object" + properties: + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." 
+ default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + required: + - "chunk_size" + group: "processing" + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + description: "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + description: "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + required: + - "token" + description: "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + description: "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: "vector" + type: "string" + text_field: + title: "Text Field" + description: "The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + destinationType: + title: "milvus" + const: "milvus" + enum: + - "milvus" + order: 0 + type: "string" + required: + - "processing" + - "embedding" + - "indexing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + destination-milvus-update: + title: "Milvus Destination Config" + type: "object" + properties: + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. 
`user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + required: + - "chunk_size" + group: "processing" + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + description: "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + - title: "From Field" + type: "object" + properties: + mode: + title: "Mode" + default: "from_field" + const: "from_field" + enum: + - "from_field" + type: "string" + field_name: + title: "Field name" + description: "Name of the field in the record that contains the embedding" + examples: + - "embedding" + - "vector" + type: "string" + dimensions: + title: "Embedding dimensions" + description: "The number of dimensions the embedding model is generating" + examples: + - 1536 + - 384 + type: "integer" + required: + - "field_name" + - "dimensions" + description: "Use a field in the record as the embedding. This is useful\ + \ if you already have an embedding for your data and want to store it\ + \ in the vector store." + indexing: + title: "Indexing" + type: "object" + properties: + host: + title: "Public Endpoint" + description: "The public endpoint of the Milvus instance. 
" + order: 1 + examples: + - "https://my-instance.zone.zillizcloud.com" + - "tcp://host.docker.internal:19530" + - "tcp://my-local-milvus:19530" + type: "string" + db: + title: "Database Name" + description: "The database to connect to" + default: "" + type: "string" + collection: + title: "Collection Name" + description: "The collection to load data into" + order: 3 + type: "string" + auth: + title: "Authentication" + description: "Authentication method" + type: "object" + order: 2 + oneOf: + - title: "API Token" + type: "object" + properties: + mode: + title: "Mode" + default: "token" + const: "token" + enum: + - "token" + type: "string" + token: + title: "API Token" + description: "API Token for the Milvus instance" + airbyte_secret: true + type: "string" + required: + - "token" + description: "Authenticate using an API token (suitable for Zilliz\ + \ Cloud)" + - title: "Username/Password" + type: "object" + properties: + mode: + title: "Mode" + default: "username_password" + const: "username_password" + enum: + - "username_password" + type: "string" + username: + title: "Username" + description: "Username for the Milvus instance" + order: 1 + type: "string" + password: + title: "Password" + description: "Password for the Milvus instance" + airbyte_secret: true + order: 2 + type: "string" + required: + - "username" + - "password" + description: "Authenticate using username and password (suitable for\ + \ self-managed Milvus clusters)" + - title: "No auth" + type: "object" + properties: + mode: + title: "Mode" + default: "no_auth" + const: "no_auth" + enum: + - "no_auth" + type: "string" + description: "Do not authenticate (suitable for locally running test\ + \ clusters, do not use for clusters with public IP addresses)" + vector_field: + title: "Vector Field" + description: "The field in the entity that contains the vector" + default: "vector" + type: "string" + text_field: + title: "Text Field" + description: "The field in the entity that contains the embedded text" + default: "text" + type: "string" + required: + - "host" + - "collection" + - "auth" + group: "indexing" + description: "Indexing configuration" + required: + - "processing" + - "embedding" + - "indexing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" destination-firebolt: title: "Firebolt Spec" type: "object" @@ -54581,6 +55469,318 @@ components: type: "string" airbyte_secret: true order: 6 + destination-pinecone: + title: "Pinecone Destination Config" + type: "object" + properties: + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + airbyte_secret: true + type: "string" + pinecone_environment: + title: "Pinecone environment" + description: "Pinecone environment to use" + type: "string" + index: + title: "Index" + description: "Pinecone index to use" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." 
+ group: "indexing" + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + description: "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." + default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." 
+ default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + required: + - "chunk_size" + group: "processing" + destinationType: + title: "pinecone" + const: "pinecone" + enum: + - "pinecone" + order: 0 + type: "string" + required: + - "indexing" + - "embedding" + - "processing" + - "destinationType" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" + destination-pinecone-update: + title: "Pinecone Destination Config" + type: "object" + properties: + indexing: + title: "Indexing" + type: "object" + properties: + pinecone_key: + title: "Pinecone API key" + airbyte_secret: true + type: "string" + pinecone_environment: + title: "Pinecone environment" + description: "Pinecone environment to use" + type: "string" + index: + title: "Index" + description: "Pinecone index to use" + type: "string" + required: + - "pinecone_key" + - "pinecone_environment" + - "index" + description: "Pinecone is a popular vector store that can be used to store\ + \ and retrieve embeddings." + group: "indexing" + embedding: + title: "Embedding" + description: "Embedding configuration" + group: "embedding" + type: "object" + oneOf: + - title: "OpenAI" + type: "object" + properties: + mode: + title: "Mode" + default: "openai" + const: "openai" + enum: + - "openai" + type: "string" + openai_key: + title: "OpenAI API key" + airbyte_secret: true + type: "string" + required: + - "openai_key" + description: "Use the OpenAI API to embed text. This option is using the\ + \ text-embedding-ada-002 model with 1536 embedding dimensions." + - title: "Cohere" + type: "object" + properties: + mode: + title: "Mode" + default: "cohere" + const: "cohere" + enum: + - "cohere" + type: "string" + cohere_key: + title: "Cohere API key" + airbyte_secret: true + type: "string" + required: + - "cohere_key" + description: "Use the Cohere API to embed text." + - title: "Fake" + type: "object" + properties: + mode: + title: "Mode" + default: "fake" + const: "fake" + enum: + - "fake" + type: "string" + description: "Use a fake embedding made out of random vectors with 1536\ + \ embedding dimensions. This is useful for testing the data pipeline\ + \ without incurring any costs." + processing: + title: "ProcessingConfigModel" + type: "object" + properties: + chunk_size: + title: "Chunk size" + description: "Size of chunks in tokens to store in vector store (make\ + \ sure it is not too big for the context if your LLM)" + maximum: 8191 + type: "integer" + chunk_overlap: + title: "Chunk overlap" + description: "Size of overlap between chunks in tokens to store in vector\ + \ store to better capture relevant context" + default: 0 + type: "integer" + text_fields: + title: "Text fields to embed" + description: "List of fields in the record that should be used to calculate\ + \ the embedding. The field list is applied to all streams in the same\ + \ way and non-existing fields are ignored. If none are defined, all\ + \ fields are considered text fields. When specifying text fields,\ + \ you can access nested fields in the record by using dot notation,\ + \ e.g. `user.name` will access the `name` field in the `user` object.\ + \ It's also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array." 
+ default: [] + always_show: true + examples: + - "text" + - "user.name" + - "users.*.name" + type: "array" + items: + type: "string" + metadata_fields: + title: "Fields to store as metadata" + description: "List of fields in the record that should be stored as\ + \ metadata. The field list is applied to all streams in the same way\ + \ and non-existing fields are ignored. If none are defined, all fields\ + \ are considered metadata fields. When specifying text fields, you\ + \ can access nested fields in the record by using dot notation, e.g.\ + \ `user.name` will access the `name` field in the `user` object. It's\ + \ also possible to use wildcards to access all fields in an object,\ + \ e.g. `users.*.name` will access all `names` fields in all entries\ + \ of the `users` array. When specifying nested paths, all matching\ + \ values are flattened into an array set to a field named by the path." + default: [] + always_show: true + examples: + - "age" + - "user" + - "user.name" + type: "array" + items: + type: "string" + required: + - "chunk_size" + group: "processing" + required: + - "indexing" + - "embedding" + - "processing" + groups: + - id: "processing" + title: "Processing" + - id: "embedding" + title: "Embedding" + - id: "indexing" + title: "Indexing" destination-bigquery-denormalized: title: "BigQuery Denormalized Typed Struct Destination Spec" type: "object" @@ -56475,7 +57675,7 @@ components: order: 3 batch_size: title: "Batch size" - type: "string" + type: "integer" description: "How many documents should be imported together. Default 1000" order: 4 destinationType: @@ -56516,7 +57716,7 @@ components: order: 3 batch_size: title: "Batch size" - type: "string" + type: "integer" description: "How many documents should be imported together. Default 1000" order: 4 destination-bigquery: @@ -56748,17 +57948,11 @@ components: examples: - "15" order: 6 - use_1s1t_format: - type: "boolean" - description: "(Early Access) Use Destinations V2." - title: "Use Destinations V2 (Early Access)" - order: 7 raw_data_dataset: type: "string" - description: "(Early Access) The dataset to write raw tables into" - title: "Destinations V2 Raw Table Dataset (Early Access)" - order: 8 + description: "The dataset to write raw tables into" + title: "Destinations V2 Raw Table Dataset" + order: 7 destinationType: title: "bigquery" const: "bigquery" @@ -56994,17 +58188,11 @@ components: examples: - "15" order: 6 - use_1s1t_format: - type: "boolean" - description: "(Early Access) Use Destinations V2." - title: "Use Destinations V2 (Early Access)" - order: 7 raw_data_dataset: type: "string" - description: "(Early Access) The dataset to write raw tables into" - title: "Destinations V2 Raw Table Dataset (Early Access)" - order: 8 + description: "The dataset to write raw tables into" + title: "Destinations V2 Raw Table Dataset" + order: 7 destination-vertica: title: "Vertica Destination Spec" type: "object" @@ -59428,27 +60616,6 @@ components: type: "string" x-speakeasy-entity: Source_Convex x-speakeasy-param-suppress-computed-diff: true - SourceDatadogCreateRequest: - required: - - "name" - - "workspaceId" - - "configuration" - type: "object" - properties: - name: - type: "string" - workspaceId: - format: "uuid" - type: "string" - configuration: - $ref: "#/components/schemas/source-datadog" - secretId: - description: - "Optional secretID obtained through the public API OAuth redirect\ - \ flow." 
- type: "string" - x-speakeasy-entity: Source_Datadog - x-speakeasy-param-suppress-computed-diff: true SourceDatascopeCreateRequest: required: - "name" @@ -61066,27 +62233,6 @@ components: type: "string" x-speakeasy-entity: Source_Onesignal x-speakeasy-param-suppress-computed-diff: true - SourceOpenweatherCreateRequest: - required: - - "name" - - "workspaceId" - - "configuration" - type: "object" - properties: - name: - type: "string" - workspaceId: - format: "uuid" - type: "string" - configuration: - $ref: "#/components/schemas/source-openweather" - secretId: - description: - "Optional secretID obtained through the public API OAuth redirect\ - \ flow." - type: "string" - x-speakeasy-entity: Source_Openweather - x-speakeasy-param-suppress-computed-diff: true SourceOracleCreateRequest: required: - "name" @@ -61486,27 +62632,6 @@ components: type: "string" x-speakeasy-entity: Source_Prestashop x-speakeasy-param-suppress-computed-diff: true - SourcePublicApisCreateRequest: - required: - - "name" - - "workspaceId" - - "configuration" - type: "object" - properties: - name: - type: "string" - workspaceId: - format: "uuid" - type: "string" - configuration: - $ref: "#/components/schemas/source-public-apis" - secretId: - description: - "Optional secretID obtained through the public API OAuth redirect\ - \ flow." - type: "string" - x-speakeasy-entity: Source_PublicApis - x-speakeasy-param-suppress-computed-diff: true SourcePunkApiCreateRequest: required: - "name" @@ -63281,6 +64406,22 @@ components: $ref: "#/components/schemas/destination-langchain" x-speakeasy-entity: Destination_Langchain x-speakeasy-param-suppress-computed-diff: true + DestinationMilvusCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-milvus" + x-speakeasy-entity: Destination_Milvus + x-speakeasy-param-suppress-computed-diff: true DestinationMongodbCreateRequest: required: - "name" @@ -63345,6 +64486,22 @@ components: $ref: "#/components/schemas/destination-oracle" x-speakeasy-entity: Destination_Oracle x-speakeasy-param-suppress-computed-diff: true + DestinationPineconeCreateRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-pinecone" + x-speakeasy-entity: Destination_Pinecone + x-speakeasy-param-suppress-computed-diff: true DestinationPostgresCreateRequest: required: - "name" @@ -64065,22 +65222,6 @@ components: $ref: "#/components/schemas/source-convex-update" x-speakeasy-entity: Source_Convex x-speakeasy-param-suppress-computed-diff: true - SourceDatadogPutRequest: - required: - - "name" - - "workspaceId" - - "configuration" - type: "object" - properties: - name: - type: "string" - workspaceId: - format: "uuid" - type: "string" - configuration: - $ref: "#/components/schemas/source-datadog-update" - x-speakeasy-entity: Source_Datadog - x-speakeasy-param-suppress-computed-diff: true SourceDatascopePutRequest: required: - "name" @@ -65313,22 +66454,6 @@ components: $ref: "#/components/schemas/source-onesignal-update" x-speakeasy-entity: Source_Onesignal x-speakeasy-param-suppress-computed-diff: true - SourceOpenweatherPutRequest: - required: - - "name" - - "workspaceId" - - "configuration" - type: "object" - properties: - name: - type: "string" - workspaceId: - 
format: "uuid" - type: "string" - configuration: - $ref: "#/components/schemas/source-openweather-update" - x-speakeasy-entity: Source_Openweather - x-speakeasy-param-suppress-computed-diff: true SourceOraclePutRequest: required: - "name" @@ -65633,22 +66758,6 @@ components: $ref: "#/components/schemas/source-prestashop-update" x-speakeasy-entity: Source_Prestashop x-speakeasy-param-suppress-computed-diff: true - SourcePublicApisPutRequest: - required: - - "name" - - "workspaceId" - - "configuration" - type: "object" - properties: - name: - type: "string" - workspaceId: - format: "uuid" - type: "string" - configuration: - $ref: "#/components/schemas/source-public-apis-update" - x-speakeasy-entity: Source_PublicApis - x-speakeasy-param-suppress-computed-diff: true SourcePunkApiPutRequest: required: - "name" @@ -67073,6 +68182,22 @@ components: $ref: "#/components/schemas/destination-langchain-update" x-speakeasy-entity: Destination_Langchain x-speakeasy-param-suppress-computed-diff: true + DestinationMilvusPutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-milvus-update" + x-speakeasy-entity: Destination_Milvus + x-speakeasy-param-suppress-computed-diff: true DestinationMongodbPutRequest: required: - "name" @@ -67137,6 +68262,22 @@ components: $ref: "#/components/schemas/destination-oracle-update" x-speakeasy-entity: Destination_Oracle x-speakeasy-param-suppress-computed-diff: true + DestinationPineconePutRequest: + required: + - "name" + - "workspaceId" + - "configuration" + type: "object" + properties: + name: + type: "string" + workspaceId: + format: "uuid" + type: "string" + configuration: + $ref: "#/components/schemas/destination-pinecone-update" + x-speakeasy-entity: Destination_Pinecone + x-speakeasy-param-suppress-computed-diff: true DestinationPostgresPutRequest: required: - "name" diff --git a/docs/data-sources/destination_bigquery.md b/docs/data-sources/destination_bigquery.md index ec856764e..74596f1dd 100644 --- a/docs/data-sources/destination_bigquery.md +++ b/docs/data-sources/destination_bigquery.md @@ -44,10 +44,9 @@ The location of the dataset. Warning: Changes made after creation will not be ap - `destination_type` (String) must be one of ["bigquery"] - `loading_method` (Attributes) Loading method used to send select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method)) - `project_id` (String) The GCP project ID for the project containing the target BigQuery dataset. Read more here. -- `raw_data_dataset` (String) (Early Access) The dataset to write raw tables into +- `raw_data_dataset` (String) The dataset to write raw tables into - `transformation_priority` (String) must be one of ["interactive", "batch"] Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. -- `use_1s1t_format` (Boolean) (Early Access) Use Destinations V2. ### Nested Schema for `configuration.loading_method` diff --git a/docs/data-sources/destination_milvus.md b/docs/data-sources/destination_milvus.md new file mode 100644 index 000000000..515a84e8e --- /dev/null +++ b/docs/data-sources/destination_milvus.md @@ -0,0 +1,221 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "airbyte_destination_milvus Data Source - terraform-provider-airbyte" +subcategory: "" +description: |- + DestinationMilvus DataSource +--- + +# airbyte_destination_milvus (Data Source) + +DestinationMilvus DataSource + +## Example Usage + +```terraform +data "airbyte_destination_milvus" "my_destination_milvus" { + destination_id = "...my_destination_id..." +} +``` + + +## Schema + +### Required + +- `destination_id` (String) + +### Read-Only + +- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) +- `name` (String) +- `workspace_id` (String) + + +### Nested Schema for `configuration` + +Read-Only: + +- `destination_type` (String) must be one of ["milvus"] +- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding)) +- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing)) +- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing)) + + +### Nested Schema for `configuration.embedding` + +Read-Only: + +- `destination_milvus_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_cohere)) +- `destination_milvus_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_fake)) +- `destination_milvus_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. 
(see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_from_field)) +- `destination_milvus_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_open_ai)) +- `destination_milvus_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_cohere)) +- `destination_milvus_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_fake)) +- `destination_milvus_update_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_from_field)) +- `destination_milvus_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_open_ai)) + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_cohere` + +Read-Only: + +- `cohere_key` (String) +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_fake` + +Read-Only: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_from_field` + +Read-Only: + +- `dimensions` (Number) The number of dimensions the embedding model is generating +- `field_name` (String) Name of the field in the record that contains the embedding +- `mode` (String) must be one of ["from_field"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_open_ai` + +Read-Only: + +- `mode` (String) must be one of ["openai"] +- `openai_key` (String) + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_cohere` + +Read-Only: + +- `cohere_key` (String) +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_fake` + +Read-Only: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_from_field` + +Read-Only: + +- `dimensions` (Number) The number of dimensions the embedding model is generating +- `field_name` (String) Name of the field in the record that contains the embedding +- `mode` (String) must be one of ["from_field"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_open_ai` + +Read-Only: + +- `mode` (String) must be one of ["openai"] +- `openai_key` (String) + + + + +### Nested Schema for `configuration.indexing` + +Read-Only: + +- `auth` (Attributes) Authentication method (see [below for nested schema](#nestedatt--configuration--indexing--auth)) +- `collection` (String) The collection to load data into +- `db` (String) The database to connect to +- `host` 
(String) The public endpoint of the Milvus instance. +- `text_field` (String) The field in the entity that contains the embedded text +- `vector_field` (String) The field in the entity that contains the vector + + +### Nested Schema for `configuration.indexing.auth` + +Read-Only: + +- `destination_milvus_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_api_token)) +- `destination_milvus_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_no_auth)) +- `destination_milvus_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_username_password)) +- `destination_milvus_update_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_api_token)) +- `destination_milvus_update_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_no_auth)) +- `destination_milvus_update_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_username_password)) + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Read-Only: + +- `mode` (String) must be one of ["token"] +- `token` (String) API Token for the Milvus instance + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Read-Only: + +- `mode` (String) must be one of ["no_auth"] + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Read-Only: + +- `mode` (String) must be one of ["username_password"] +- `password` (String) Password for the Milvus instance +- `username` (String) Username for the Milvus instance + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Read-Only: + +- `mode` (String) must be one of ["token"] +- `token` (String) API Token for the Milvus instance + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Read-Only: + +- `mode` (String) must be one of ["no_auth"] + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Read-Only: + +- `mode` (String) must be one of ["username_password"] +- `password` (String) Password for the Milvus instance +- `username` (String) Username for the Milvus instance + + + 
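For illustration, a hypothetical Terraform sketch of a Milvus destination using the attributes documented above, with token authentication and OpenAI embedding. The attribute names mirror the data-source schema in this patch; the exact resource layout generated by the provider may differ, and all `var.*` values are placeholders.

```terraform
# Hypothetical sketch only: attribute names mirror the data-source schema above,
# but the generated resource may nest these attributes differently.
resource "airbyte_destination_milvus" "example" {
  name         = "milvus"
  workspace_id = var.workspace_id # placeholder

  configuration = {
    processing = {
      chunk_size  = 1000     # tokens per chunk (required, maximum 8191)
      text_fields = ["text"] # dot notation and wildcards are supported
    }
    embedding = {
      destination_milvus_embedding_open_ai = {
        mode       = "openai"
        openai_key = var.openai_api_key # placeholder secret
      }
    }
    indexing = {
      host       = "https://my-instance.zone.zillizcloud.com"
      collection = "airbyte"
      auth = {
        destination_milvus_indexing_authentication_api_token = {
          mode  = "token"
          token = var.zilliz_api_token # placeholder secret
        }
      }
    }
  }
}
```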
+ + +### Nested Schema for `configuration.processing` + +Read-Only: + +- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context +- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) +- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. +- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. + + diff --git a/docs/data-sources/destination_pinecone.md b/docs/data-sources/destination_pinecone.md new file mode 100644 index 000000000..a8cbc6933 --- /dev/null +++ b/docs/data-sources/destination_pinecone.md @@ -0,0 +1,129 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "airbyte_destination_pinecone Data Source - terraform-provider-airbyte" +subcategory: "" +description: |- + DestinationPinecone DataSource +--- + +# airbyte_destination_pinecone (Data Source) + +DestinationPinecone DataSource + +## Example Usage + +```terraform +data "airbyte_destination_pinecone" "my_destination_pinecone" { + destination_id = "...my_destination_id..." +} +``` + + +## Schema + +### Required + +- `destination_id` (String) + +### Read-Only + +- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) +- `name` (String) +- `workspace_id` (String) + + +### Nested Schema for `configuration` + +Read-Only: + +- `destination_type` (String) must be one of ["pinecone"] +- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding)) +- `indexing` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. (see [below for nested schema](#nestedatt--configuration--indexing)) +- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing)) + + +### Nested Schema for `configuration.embedding` + +Read-Only: + +- `destination_pinecone_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_cohere)) +- `destination_pinecone_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. 
(see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_fake)) +- `destination_pinecone_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_open_ai)) +- `destination_pinecone_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_cohere)) +- `destination_pinecone_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_fake)) +- `destination_pinecone_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_open_ai)) + + +### Nested Schema for `configuration.embedding.destination_pinecone_embedding_cohere` + +Read-Only: + +- `cohere_key` (String) +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_embedding_fake` + +Read-Only: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_embedding_open_ai` + +Read-Only: + +- `mode` (String) must be one of ["openai"] +- `openai_key` (String) + + + +### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_cohere` + +Read-Only: + +- `cohere_key` (String) +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_fake` + +Read-Only: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_open_ai` + +Read-Only: + +- `mode` (String) must be one of ["openai"] +- `openai_key` (String) + + + + +### Nested Schema for `configuration.indexing` + +Read-Only: + +- `index` (String) Pinecone index to use +- `pinecone_environment` (String) Pinecone environment to use +- `pinecone_key` (String) + + + +### Nested Schema for `configuration.processing` + +Read-Only: + +- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context +- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) +- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. 
+- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. + + diff --git a/docs/data-sources/destination_snowflake.md b/docs/data-sources/destination_snowflake.md index 4558e3115..b68969f18 100644 --- a/docs/data-sources/destination_snowflake.md +++ b/docs/data-sources/destination_snowflake.md @@ -41,10 +41,9 @@ Read-Only: - `destination_type` (String) must be one of ["snowflake"] - `host` (String) Enter your Snowflake account's locator (in the format ...snowflakecomputing.com) - `jdbc_url_params` (String) Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 -- `raw_data_schema` (String) (Beta) The schema to write raw tables into +- `raw_data_schema` (String) The schema to write raw tables into - `role` (String) Enter the role that you want to use to access Snowflake - `schema` (String) Enter the name of the default schema -- `use_1s1t_format` (Boolean) (Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program. - `username` (String) Enter the name of the user you want to use to access the database - `warehouse` (String) Enter the name of the warehouse that you want to sync data into diff --git a/docs/data-sources/destination_typesense.md b/docs/data-sources/destination_typesense.md index 3b56d4af8..11db436ef 100644 --- a/docs/data-sources/destination_typesense.md +++ b/docs/data-sources/destination_typesense.md @@ -37,7 +37,7 @@ data "airbyte_destination_typesense" "my_destination_typesense" { Read-Only: - `api_key` (String) Typesense API Key -- `batch_size` (String) How many documents should be imported together. Default 1000 +- `batch_size` (Number) How many documents should be imported together. Default 1000 - `destination_type` (String) must be one of ["typesense"] - `host` (String) Hostname of the Typesense instance without protocol. - `port` (String) Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443 diff --git a/docs/data-sources/source_amazon_ads.md b/docs/data-sources/source_amazon_ads.md index 88d1ab2f3..e320fca0e 100644 --- a/docs/data-sources/source_amazon_ads.md +++ b/docs/data-sources/source_amazon_ads.md @@ -45,7 +45,8 @@ Read-Only: - `client_id` (String) The client ID of your Amazon Ads developer application. See the docs for more information. - `client_secret` (String) The client secret of your Amazon Ads developer application. See the docs for more information. - `look_back_window` (Number) The amount of days to go back in time to get the updated data from Amazon Ads -- `profiles` (List of Number) Profile IDs you want to fetch data for. See docs for more details. +- `marketplace_ids` (List of String) Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. +- `profiles` (List of Number) Profile IDs you want to fetch data for. See docs for more details. 
Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. - `refresh_token` (String) Amazon Ads refresh token. See the docs for more information on how to obtain this token. - `region` (String) must be one of ["NA", "EU", "FE"] Region to pull data from (EU/NA/FE). See docs for more details. diff --git a/docs/data-sources/source_apify_dataset.md b/docs/data-sources/source_apify_dataset.md index 31825b540..3a3966b70 100644 --- a/docs/data-sources/source_apify_dataset.md +++ b/docs/data-sources/source_apify_dataset.md @@ -44,5 +44,6 @@ Read-Only: - `clean` (Boolean) If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false. - `dataset_id` (String) ID of the dataset you would like to load to Airbyte. - `source_type` (String) must be one of ["apify-dataset"] +- `token` (String) Your application's Client Secret. You can find this value on the console integrations tab after you login. diff --git a/docs/data-sources/source_auth0.md b/docs/data-sources/source_auth0.md index 27cb33f67..c7ff792a7 100644 --- a/docs/data-sources/source_auth0.md +++ b/docs/data-sources/source_auth0.md @@ -44,6 +44,7 @@ Read-Only: - `base_url` (String) The Authentication API is served over HTTPS. All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN` - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) - `source_type` (String) must be one of ["auth0"] +- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. ### Nested Schema for `configuration.credentials` diff --git a/docs/data-sources/source_datadog.md b/docs/data-sources/source_datadog.md deleted file mode 100644 index 1fb84ee64..000000000 --- a/docs/data-sources/source_datadog.md +++ /dev/null @@ -1,65 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "airbyte_source_datadog Data Source - terraform-provider-airbyte" -subcategory: "" -description: |- - SourceDatadog DataSource ---- - -# airbyte_source_datadog (Data Source) - -SourceDatadog DataSource - -## Example Usage - -```terraform -data "airbyte_source_datadog" "my_source_datadog" { - secret_id = "...my_secret_id..." - source_id = "...my_source_id..." -} -``` - - -## Schema - -### Required - -- `source_id` (String) - -### Optional - -- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow. - -### Read-Only - -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) -- `name` (String) -- `workspace_id` (String) - - -### Nested Schema for `configuration` - -Read-Only: - -- `api_key` (String) Datadog API key -- `application_key` (String) Datadog application key -- `end_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. -- `max_records_per_request` (Number) Maximum number of records to collect per request. -- `queries` (Attributes List) List of queries to be run and used as inputs. (see [below for nested schema](#nestedatt--configuration--queries)) -- `query` (String) The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. 
-- `site` (String) must be one of ["datadoghq.com", "us3.datadoghq.com", "us5.datadoghq.com", "datadoghq.eu", "ddog-gov.com"] -The site where Datadog data resides in. -- `source_type` (String) must be one of ["datadog"] -- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. - - -### Nested Schema for `configuration.queries` - -Read-Only: - -- `data_source` (String) must be one of ["metrics", "cloud_cost", "logs", "rum"] -A data source that is powered by the platform. -- `name` (String) The variable name for use in queries. -- `query` (String) A classic query string. - - diff --git a/docs/data-sources/source_google_ads.md b/docs/data-sources/source_google_ads.md index 4d7836279..d69d22932 100644 --- a/docs/data-sources/source_google_ads.md +++ b/docs/data-sources/source_google_ads.md @@ -45,10 +45,10 @@ Read-Only: - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) - `custom_queries` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_queries)) - `customer_id` (String) Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation. -- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. +- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set) - `login_customer_id` (String) If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation. - `source_type` (String) must be one of ["google-ads"] -- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. +- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set) ### Nested Schema for `configuration.credentials` diff --git a/docs/data-sources/source_google_search_console.md b/docs/data-sources/source_google_search_console.md index 6075c0a7d..c13b41baa 100644 --- a/docs/data-sources/source_google_search_console.md +++ b/docs/data-sources/source_google_search_console.md @@ -42,13 +42,14 @@ data "airbyte_source_google_search_console" "my_source_googlesearchconsole" { Read-Only: - `authorization` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization)) -- `custom_reports` (String) A JSON array describing the custom reports you want to sync from Google Search Console. See the docs for more information about the exact format you can use to fill out this field. +- `custom_reports` (String) (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports. +- `custom_reports_array` (Attributes List) You can add your Custom Analytics report by creating one. (see [below for nested schema](#nestedatt--configuration--custom_reports_array)) - `data_state` (String) must be one of ["final", "all"] -If "final" or if this parameter is omitted, the returned data will include only finalized data. 
Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. -- `end_date` (String) UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field. -- `site_urls` (List of String) The URLs of the website property attached to your GSC account. Read more here. +If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. +- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. +- `site_urls` (List of String) The URLs of the website property attached to your GSC account. Learn more about properties here. - `source_type` (String) must be one of ["google-search-console"] -- `start_date` (String) UTC date in the format 2017-01-25. Any data before this date will not be replicated. +- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. ### Nested Schema for `configuration.authorization` @@ -104,3 +105,13 @@ Read-Only: - `service_account_info` (String) The JSON key of the service account to use for authorization. Read more here. + + +### Nested Schema for `configuration.custom_reports_array` + +Read-Only: + +- `dimensions` (List of String) A list of dimensions (country, date, device, page, query) +- `name` (String) The name of the custom report, this name would be used as stream name + + diff --git a/docs/data-sources/source_google_sheets.md b/docs/data-sources/source_google_sheets.md index d3805bff5..9a1e2595f 100644 --- a/docs/data-sources/source_google_sheets.md +++ b/docs/data-sources/source_google_sheets.md @@ -43,7 +43,6 @@ Read-Only: - `credentials` (Attributes) Credentials for connecting to the Google Sheets API (see [below for nested schema](#nestedatt--configuration--credentials)) - `names_conversion` (Boolean) Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based. -- `row_batch_size` (Number) The number of rows fetched when making a Google Sheet API call. Defaults to 200. - `source_type` (String) must be one of ["google-sheets"] - `spreadsheet_id` (String) Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'. diff --git a/docs/data-sources/source_intercom.md b/docs/data-sources/source_intercom.md index cf3e98321..4c16c3ba4 100644 --- a/docs/data-sources/source_intercom.md +++ b/docs/data-sources/source_intercom.md @@ -42,6 +42,8 @@ data "airbyte_source_intercom" "my_source_intercom" { Read-Only: - `access_token` (String) Access token for making authenticated requests. See the Intercom docs for more information. +- `client_id` (String) Client Id for your Intercom application. +- `client_secret` (String) Client Secret for your Intercom application. - `source_type` (String) must be one of ["intercom"] - `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
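By way of example, the Intercom source documented above now accepts an optional client ID and client secret alongside the access token. A hypothetical resource block using these fields might look like the sketch below; the configuration keys follow the data-source schema shown here, but the generated resource layout and exact syntax may differ, and the `var.*` values are placeholders.

```terraform
# Hypothetical sketch only: configuration keys mirror the data-source schema above.
resource "airbyte_source_intercom" "example" {
  name         = "intercom"
  workspace_id = var.workspace_id # placeholder

  configuration = {
    access_token  = var.intercom_access_token  # required for authenticated requests
    client_id     = var.intercom_client_id     # optional
    client_secret = var.intercom_client_secret # optional
    start_date    = "2023-01-25T00:00:00Z"     # data before this date is not replicated
  }
}
```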
diff --git a/docs/data-sources/source_lemlist.md b/docs/data-sources/source_lemlist.md index 0af2faff2..c08e60316 100644 --- a/docs/data-sources/source_lemlist.md +++ b/docs/data-sources/source_lemlist.md @@ -41,7 +41,7 @@ data "airbyte_source_lemlist" "my_source_lemlist" { Read-Only: -- `api_key` (String) Lemlist API key. +- `api_key` (String) Lemlist API key, - `source_type` (String) must be one of ["lemlist"] diff --git a/docs/data-sources/source_linkedin_ads.md b/docs/data-sources/source_linkedin_ads.md index 5dbe44cd5..2cb32b690 100644 --- a/docs/data-sources/source_linkedin_ads.md +++ b/docs/data-sources/source_linkedin_ads.md @@ -41,26 +41,22 @@ data "airbyte_source_linkedin_ads" "my_source_linkedinads" { Read-Only: -- `account_ids` (List of Number) Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info. +- `account_ids` (List of Number) Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs. - `ad_analytics_reports` (Attributes List) (see [below for nested schema](#nestedatt--configuration--ad_analytics_reports)) - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) - `source_type` (String) must be one of ["linkedin-ads"] -- `start_date` (String) UTC date in the format 2020-09-17. Any data before this date will not be replicated. +- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. ### Nested Schema for `configuration.ad_analytics_reports` Read-Only: -- `name` (String) The name for the report +- `name` (String) The name for the custom report. - `pivot_by` (String) must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"] -Select value from list to pivot by +Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. - `time_granularity` (String) must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"] -Set time granularity for report: -ALL - Results grouped into a single result across the entire time range of the report. -DAILY - Results grouped by day. -MONTHLY - Results grouped by month. -YEARLY - Results grouped by year. +Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. @@ -78,7 +74,7 @@ Read-Only: Read-Only: -- `access_token` (String) The token value generated using the authentication code. See the docs to obtain yours. +- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information. - `auth_method` (String) must be one of ["access_token"] @@ -88,9 +84,9 @@ Read-Only: Read-Only: - `auth_method` (String) must be one of ["oAuth2.0"] -- `client_id` (String) The client ID of the LinkedIn Ads developer application. -- `client_secret` (String) The client secret the LinkedIn Ads developer application. -- `refresh_token` (String) The key to refresh the expired access token. +- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information. +- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information. +- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information. @@ -98,7 +94,7 @@ Read-Only: Read-Only: -- `access_token` (String) The token value generated using the authentication code. See the docs to obtain yours. +- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information. - `auth_method` (String) must be one of ["access_token"] @@ -108,8 +104,8 @@ Read-Only: Read-Only: - `auth_method` (String) must be one of ["oAuth2.0"] -- `client_id` (String) The client ID of the LinkedIn Ads developer application. -- `client_secret` (String) The client secret the LinkedIn Ads developer application. -- `refresh_token` (String) The key to refresh the expired access token. +- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information. +- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information. +- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information. diff --git a/docs/data-sources/source_mssql.md b/docs/data-sources/source_mssql.md index 9e1901f29..ded4cb2bc 100644 --- a/docs/data-sources/source_mssql.md +++ b/docs/data-sources/source_mssql.md @@ -46,7 +46,7 @@ Read-Only: - `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). - `password` (String) The password associated with the username. - `port` (Number) The port of the database. -- `replication_method` (Attributes) The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. (see [below for nested schema](#nestedatt--configuration--replication_method)) +- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method)) - `schemas` (List of String) The list of schemas to sync from. Defaults to user. Case sensitive. 
- `source_type` (String) must be one of ["mssql"] - `ssl_method` (Attributes) The encryption method which is used when communicating with the database. (see [below for nested schema](#nestedatt--configuration--ssl_method)) @@ -58,13 +58,13 @@ Read-Only: Read-Only: -- `source_mssql_replication_method_logical_replication_cdc` (Attributes) CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_replication_method_logical_replication_cdc)) -- `source_mssql_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_replication_method_standard)) -- `source_mssql_update_replication_method_logical_replication_cdc` (Attributes) CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_replication_method_logical_replication_cdc)) -- `source_mssql_update_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_replication_method_standard)) +- `source_mssql_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_read_changes_using_change_data_capture_cdc)) +- `source_mssql_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_scan_changes_with_user_defined_cursor)) +- `source_mssql_update_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_read_changes_using_change_data_capture_cdc)) +- `source_mssql_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_scan_changes_with_user_defined_cursor)) - -### Nested Schema for `configuration.replication_method.source_mssql_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_mssql_update_method_read_changes_using_change_data_capture_cdc` Read-Only: @@ -76,16 +76,16 @@ What data should be synced under the CDC. "Existing and New" will read existing Existing data in the database are synced through an initial snapshot. 
This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. - -### Nested Schema for `configuration.replication_method.source_mssql_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_mssql_update_method_scan_changes_with_user_defined_cursor` Read-Only: - `method` (String) must be one of ["STANDARD"] - -### Nested Schema for `configuration.replication_method.source_mssql_update_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_read_changes_using_change_data_capture_cdc` Read-Only: @@ -97,8 +97,8 @@ What data should be synced under the CDC. "Existing and New" will read existing Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. - -### Nested Schema for `configuration.replication_method.source_mssql_update_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_scan_changes_with_user_defined_cursor` Read-Only: diff --git a/docs/data-sources/source_openweather.md b/docs/data-sources/source_openweather.md deleted file mode 100644 index 8efa6311a..000000000 --- a/docs/data-sources/source_openweather.md +++ /dev/null @@ -1,53 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "airbyte_source_openweather Data Source - terraform-provider-airbyte" -subcategory: "" -description: |- - SourceOpenweather DataSource ---- - -# airbyte_source_openweather (Data Source) - -SourceOpenweather DataSource - -## Example Usage - -```terraform -data "airbyte_source_openweather" "my_source_openweather" { - secret_id = "...my_secret_id..." - source_id = "...my_source_id..." -} -``` - - -## Schema - -### Required - -- `source_id` (String) - -### Optional - -- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow. - -### Read-Only - -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) -- `name` (String) -- `workspace_id` (String) - - -### Nested Schema for `configuration` - -Read-Only: - -- `appid` (String) Your OpenWeather API Key. See here. The key is case sensitive. -- `lang` (String) must be one of ["af", "al", "ar", "az", "bg", "ca", "cz", "da", "de", "el", "en", "eu", "fa", "fi", "fr", "gl", "he", "hi", "hr", "hu", "id", "it", "ja", "kr", "la", "lt", "mk", "no", "nl", "pl", "pt", "pt_br", "ro", "ru", "sv", "se", "sk", "sl", "sp", "es", "sr", "th", "tr", "ua", "uk", "vi", "zh_cn", "zh_tw", "zu"] -You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. -- `lat` (String) Latitude for which you want to get weather condition from. (min -90, max 90) -- `lon` (String) Longitude for which you want to get weather condition from. (min -180, max 180) -- `source_type` (String) must be one of ["openweather"] -- `units` (String) must be one of ["standard", "metric", "imperial"] -Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. 
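As an aside on the MSSQL update-method rename shown above: a minimal, hypothetical sketch of how the CDC-based variant might be selected on the corresponding `airbyte_source_mssql` resource. The nested attribute name is taken from the data-source schema in this diff; the resource-side name, the `method` value, and the surrounding connection fields are assumptions for illustration, not the provider's exact syntax.

```terraform
# Hypothetical sketch only: selecting the CDC-based update method for an MSSQL source.
# The nested attribute name follows the data-source schema above; the exact resource-side
# attribute names and the "method" enum value may differ in your provider version.
resource "airbyte_source_mssql" "cdc_example" {
  name         = "mssql-cdc-example"
  workspace_id = "...my_workspace_id..."
  configuration = {
    host        = "db.example.com"
    port        = 1433
    database    = "analytics"
    username    = "airbyte"
    password    = "...my_password..."
    source_type = "mssql"
    replication_method = {
      source_mssql_update_method_read_changes_using_change_data_capture_cdc = {
        method = "CDC" # assumed enum value; CDC must already be enabled on the source database
      }
    }
  }
}
```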
- - diff --git a/docs/data-sources/source_postgres.md b/docs/data-sources/source_postgres.md index cc51f8fc9..54cf73891 100644 --- a/docs/data-sources/source_postgres.md +++ b/docs/data-sources/source_postgres.md @@ -46,7 +46,7 @@ Read-Only: - `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters. - `password` (String) Password associated with the username. - `port` (Number) Port of the database. -- `replication_method` (Attributes) Replication method for extracting data from the database. (see [below for nested schema](#nestedatt--configuration--replication_method)) +- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method)) - `schemas` (List of String) The list of schemas (case sensitive) to sync from. Defaults to public. - `source_type` (String) must be one of ["postgres"] - `ssl_mode` (Attributes) SSL connection modes. @@ -59,15 +59,23 @@ Read-Only: Read-Only: -- `source_postgres_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_replication_method_logical_replication_cdc)) -- `source_postgres_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_replication_method_standard)) -- `source_postgres_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_replication_method_standard_xmin)) -- `source_postgres_update_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_replication_method_logical_replication_cdc)) -- `source_postgres_update_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_replication_method_standard)) -- `source_postgres_update_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_replication_method_standard_xmin)) +- `source_postgres_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. 
(see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_detect_changes_with_xmin_system_column)) +- `source_postgres_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_read_changes_using_write_ahead_log_cdc)) +- `source_postgres_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_scan_changes_with_user_defined_cursor)) +- `source_postgres_update_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_detect_changes_with_xmin_system_column)) +- `source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc)) +- `source_postgres_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_scan_changes_with_user_defined_cursor)) - -### Nested Schema for `configuration.replication_method.source_postgres_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_postgres_update_method_detect_changes_with_xmin_system_column` + +Read-Only: + +- `method` (String) must be one of ["Xmin"] + + + +### Nested Schema for `configuration.replication_method.source_postgres_update_method_read_changes_using_write_ahead_log_cdc` Optional: @@ -86,24 +94,24 @@ A logical decoding plugin installed on the PostgreSQL server. - `replication_slot` (String) A plugin logical replication slot. Read about replication slots. 
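For orientation, a similar hedged sketch of choosing the new Xmin update method on the resource side. The `method` value "Xmin" and the nested attribute name come from the data-source schema in this diff; the remaining field names are illustrative assumptions and may differ on the `airbyte_source_postgres` resource.

```terraform
# Hypothetical sketch only: selecting the Xmin-based update method for a Postgres source.
# "Xmin" is the enum value documented in the nested schema above; other attribute names
# are assumed for illustration and may differ in your provider version.
resource "airbyte_source_postgres" "xmin_example" {
  name         = "postgres-xmin-example"
  workspace_id = "...my_workspace_id..."
  configuration = {
    host        = "db.example.com"
    port        = 5432
    database    = "analytics"
    username    = "airbyte"
    password    = "...my_password..."
    source_type = "postgres"
    replication_method = {
      source_postgres_update_method_detect_changes_with_xmin_system_column = {
        method = "Xmin" # only recommended for tables up to 500GB, per the schema description above
      }
    }
  }
}
```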
- -### Nested Schema for `configuration.replication_method.source_postgres_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_postgres_update_method_scan_changes_with_user_defined_cursor` Read-Only: - `method` (String) must be one of ["Standard"] - -### Nested Schema for `configuration.replication_method.source_postgres_replication_method_standard_xmin` + +### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_detect_changes_with_xmin_system_column` Read-Only: - `method` (String) must be one of ["Xmin"] - -### Nested Schema for `configuration.replication_method.source_postgres_update_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc` Optional: @@ -122,22 +130,14 @@ A logical decoding plugin installed on the PostgreSQL server. - `replication_slot` (String) A plugin logical replication slot. Read about replication slots. - -### Nested Schema for `configuration.replication_method.source_postgres_update_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_scan_changes_with_user_defined_cursor` Read-Only: - `method` (String) must be one of ["Standard"] - -### Nested Schema for `configuration.replication_method.source_postgres_update_replication_method_standard_xmin` - -Read-Only: - -- `method` (String) must be one of ["Xmin"] - - ### Nested Schema for `configuration.ssl_mode` diff --git a/docs/data-sources/source_posthog.md b/docs/data-sources/source_posthog.md index 30ec6b193..e076d3b63 100644 --- a/docs/data-sources/source_posthog.md +++ b/docs/data-sources/source_posthog.md @@ -43,6 +43,7 @@ Read-Only: - `api_key` (String) API Key. See the docs for information on how to generate this key. - `base_url` (String) Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com). +- `events_time_step` (Number) Set lower value in case of failing long running sync of events stream. - `source_type` (String) must be one of ["posthog"] - `start_date` (String) The date from which you'd like to replicate the data. Any data before this date will not be replicated. diff --git a/docs/data-sources/source_public_apis.md b/docs/data-sources/source_public_apis.md deleted file mode 100644 index d8620cf8c..000000000 --- a/docs/data-sources/source_public_apis.md +++ /dev/null @@ -1,46 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "airbyte_source_public_apis Data Source - terraform-provider-airbyte" -subcategory: "" -description: |- - SourcePublicApis DataSource ---- - -# airbyte_source_public_apis (Data Source) - -SourcePublicApis DataSource - -## Example Usage - -```terraform -data "airbyte_source_public_apis" "my_source_publicapis" { - secret_id = "...my_secret_id..." - source_id = "...my_source_id..." -} -``` - - -## Schema - -### Required - -- `source_id` (String) - -### Optional - -- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow. 
- -### Read-Only - -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) -- `name` (String) -- `workspace_id` (String) - - -### Nested Schema for `configuration` - -Read-Only: - -- `source_type` (String) must be one of ["public-apis"] - - diff --git a/docs/data-sources/source_s3.md b/docs/data-sources/source_s3.md index ffa6655b1..24db6a956 100644 --- a/docs/data-sources/source_s3.md +++ b/docs/data-sources/source_s3.md @@ -32,7 +32,8 @@ data "airbyte_source_s3" "my_source_s3" { ### Read-Only -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) +- `configuration` (Attributes) NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes +because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. (see [below for nested schema](#nestedatt--configuration)) - `name` (String) - `workspace_id` (String) @@ -41,12 +42,18 @@ data "airbyte_source_s3" "my_source_s3" { Read-Only: -- `dataset` (String) The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. -- `format` (Attributes) The format of the files you'd like to replicate (see [below for nested schema](#nestedatt--configuration--format)) -- `path_pattern` (String) A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. -- `provider` (Attributes) Use this to load files from S3 or S3-compatible services (see [below for nested schema](#nestedatt--configuration--provider)) -- `schema` (String) Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. +- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. +- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. +- `bucket` (String) Name of the S3 bucket where the file(s) exist. +- `dataset` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. +- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS. +- `format` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate (see [below for nested schema](#nestedatt--configuration--format)) +- `path_pattern` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. +- `provider` (Attributes) Deprecated and will be removed soon. 
Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services (see [below for nested schema](#nestedatt--configuration--provider)) +- `schema` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. - `source_type` (String) must be one of ["s3"] +- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. +- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams)) ### Nested Schema for `configuration.format` @@ -174,3 +181,200 @@ Read-Only: - `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated. + +### Nested Schema for `configuration.streams` + +Read-Only: + +- `days_to_sync_if_history_is_full` (Number) When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. +- `file_type` (String) The data file type that is being extracted for a stream. +- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format)) +- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. +- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. +- `legacy_prefix` (String) The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. +- `name` (String) The name of the stream. +- `primary_key` (String) The column or columns (for a composite key) that serves as the unique identifier of a record. +- `schemaless` (Boolean) When enabled, syncs will not validate or structure records against the stream's schema. +- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"] +The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. + + +### Nested Schema for `configuration.streams.format` + +Read-Only: + +- `source_s3_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_avro_format)) +- `source_s3_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
(see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_csv_format)) +- `source_s3_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_jsonl_format)) +- `source_s3_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_parquet_format)) +- `source_s3_update_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_avro_format)) +- `source_s3_update_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_csv_format)) +- `source_s3_update_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_jsonl_format)) +- `source_s3_update_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format)) + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. +- `filetype` (String) must be one of ["avro"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. +- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data. +- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. +- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank. +- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values. +- `filetype` (String) must be one of ["csv"] +- `header_definition` (Attributes) How headers will be defined. 
`User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition)) +- `inference_type` (String) must be one of ["None", "Primitive Types Only"] +How to infer the types of the columns. If none, inference default to strings. +- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. +- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank. +- `skip_rows_after_header` (Number) The number of rows to skip after the header row. +- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. +- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. +- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values. + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition` + +Read-Only: + +- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated)) +- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv)) +- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. 
`User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided)) + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Read-Only: + +- `header_definition_type` (String) must be one of ["Autogenerated"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Read-Only: + +- `header_definition_type` (String) must be one of ["From CSV"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Read-Only: + +- `column_names` (List of String) The column names that will be used while emitting the CSV records +- `header_definition_type` (String) must be one of ["User Provided"] + + + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `filetype` (String) must be one of ["jsonl"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. +- `filetype` (String) must be one of ["parquet"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. +- `filetype` (String) must be one of ["avro"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. +- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data. +- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. +- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank. +- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values. 
+- `filetype` (String) must be one of ["csv"] +- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition)) +- `inference_type` (String) must be one of ["None", "Primitive Types Only"] +How to infer the types of the columns. If none, inference default to strings. +- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. +- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank. +- `skip_rows_after_header` (Number) The number of rows to skip after the header row. +- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. +- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. +- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values. + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition` + +Read-Only: + +- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated)) +- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
(see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv)) +- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided)) + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Read-Only: + +- `header_definition_type` (String) must be one of ["Autogenerated"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Read-Only: + +- `header_definition_type` (String) must be one of ["From CSV"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Read-Only: + +- `column_names` (List of String) The column names that will be used while emitting the CSV records +- `header_definition_type` (String) must be one of ["User Provided"] + + + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `filetype` (String) must be one of ["jsonl"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Read-Only: + +- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. +- `filetype` (String) must be one of ["parquet"] + + diff --git a/docs/data-sources/source_stripe.md b/docs/data-sources/source_stripe.md index b82fe6c56..a8b965c93 100644 --- a/docs/data-sources/source_stripe.md +++ b/docs/data-sources/source_stripe.md @@ -43,7 +43,7 @@ Read-Only: - `account_id` (String) Your Stripe account ID (starts with 'acct_', find yours here). - `client_secret` (String) Stripe API key (usually starts with 'sk_live_'; find yours here). -- `lookback_window_days` (Number) When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here +- `lookback_window_days` (Number) When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. 
Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here - `slice_range` (Number) The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted. - `source_type` (String) must be one of ["stripe"] - `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. diff --git a/docs/data-sources/source_zendesk_sunshine.md b/docs/data-sources/source_zendesk_sunshine.md index b6cff1496..20b70b156 100644 --- a/docs/data-sources/source_zendesk_sunshine.md +++ b/docs/data-sources/source_zendesk_sunshine.md @@ -59,10 +59,6 @@ Read-Only: ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_api_token` -Optional: - -- `additional_properties` (String) Parsed as JSON. - Read-Only: - `api_token` (String) API Token. See the docs for information on how to generate this key. @@ -73,10 +69,6 @@ Read-Only: ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_o_auth2_0` -Optional: - -- `additional_properties` (String) Parsed as JSON. - Read-Only: - `access_token` (String) Long-term access Token for making authenticated requests. @@ -88,10 +80,6 @@ Read-Only: ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_api_token` -Optional: - -- `additional_properties` (String) Parsed as JSON. - Read-Only: - `api_token` (String) API Token. See the docs for information on how to generate this key. @@ -102,10 +90,6 @@ Read-Only: ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_o_auth2_0` -Optional: - -- `additional_properties` (String) Parsed as JSON. - Read-Only: - `access_token` (String) Long-term access Token for making authenticated requests. diff --git a/docs/index.md b/docs/index.md index ba2c16ce9..be28658ed 100644 --- a/docs/index.md +++ b/docs/index.md @@ -17,7 +17,7 @@ terraform { required_providers { airbyte = { source = "airbytehq/airbyte" - version = "0.3.3" + version = "0.3.4" } } } diff --git a/docs/resources/destination_bigquery.md b/docs/resources/destination_bigquery.md index 006b6aac5..aaf0fbe69 100644 --- a/docs/resources/destination_bigquery.md +++ b/docs/resources/destination_bigquery.md @@ -39,10 +39,9 @@ resource "airbyte_destination_bigquery" "my_destination_bigquery" { project_id = "...my_project_id..." raw_data_dataset = "...my_raw_data_dataset..." transformation_priority = "batch" - use_1s1t_format = false } - name = "Alison Mann" - workspace_id = "488e1e91-e450-4ad2-abd4-4269802d502a" + name = "Edna Pouros" + workspace_id = "d488e1e9-1e45-40ad-aabd-44269802d502" } ``` @@ -76,10 +75,9 @@ Optional: - `big_query_client_buffer_size_mb` (Number) Google BigQuery client's chunk (buffer) size (MIN=1, MAX = 15) for each table. The size that will be written by a single RPC. Written data will be buffered and only flushed upon reaching this size or closing the channel. The default 15MB value is used if not set explicitly. Read more here. - `credentials_json` (String) The contents of the JSON service account key. Check out the docs if you need help generating this key. Default credentials will be used if this field is left empty. 
- `loading_method` (Attributes) Loading method used to select the way data will be uploaded to BigQuery.
Standard Inserts - Direct uploading using SQL INSERT statements. This method is extremely inefficient and provided only for quick testing. In almost all cases, you should use staging.
GCS Staging - Writes large batches of records to a file, uploads the file to GCS, then uses COPY INTO table to upload the file. Recommended for most workloads for better speed and scalability. Read more about GCS Staging here. (see [below for nested schema](#nestedatt--configuration--loading_method)) -- `raw_data_dataset` (String) (Early Access) The dataset to write raw tables into +- `raw_data_dataset` (String) The dataset to write raw tables into - `transformation_priority` (String) must be one of ["interactive", "batch"] Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. -- `use_1s1t_format` (Boolean) (Early Access) Use Destinations V2. ### Nested Schema for `configuration.loading_method` diff --git a/docs/resources/destination_bigquery_denormalized.md b/docs/resources/destination_bigquery_denormalized.md index 95c0be1ae..0c6bbd5fc 100644 --- a/docs/resources/destination_bigquery_denormalized.md +++ b/docs/resources/destination_bigquery_denormalized.md @@ -18,7 +18,7 @@ resource "airbyte_destination_bigquery_denormalized" "my_destination_bigqueryden big_query_client_buffer_size_mb = 15 credentials_json = "...my_credentials_json..." dataset_id = "...my_dataset_id..." - dataset_location = "asia-southeast2" + dataset_location = "europe-west7" destination_type = "bigquery-denormalized" loading_method = { destination_bigquery_denormalized_loading_method_gcs_staging = { @@ -38,8 +38,8 @@ resource "airbyte_destination_bigquery_denormalized" "my_destination_bigqueryden } project_id = "...my_project_id..." 
} - name = "Lucia Kemmer" - workspace_id = "969e9a3e-fa77-4dfb-94cd-66ae395efb9b" + name = "Francisco Windler" + workspace_id = "c969e9a3-efa7-47df-b14c-d66ae395efb9" } ``` diff --git a/docs/resources/destination_clickhouse.md b/docs/resources/destination_clickhouse.md index 6d2933f25..5dbe84b6c 100644 --- a/docs/resources/destination_clickhouse.md +++ b/docs/resources/destination_clickhouse.md @@ -26,10 +26,10 @@ resource "airbyte_destination_clickhouse" "my_destination_clickhouse" { tunnel_method = "NO_TUNNEL" } } - username = "Jewell.Lesch64" + username = "Magdalena_Kuvalis" } - name = "Wilma Mosciski" - workspace_id = "074ba446-9b6e-4214-9959-890afa563e25" + name = "Sandy Huels" + workspace_id = "97074ba4-469b-46e2-9419-59890afa563e" } ``` diff --git a/docs/resources/destination_convex.md b/docs/resources/destination_convex.md index 30eb25e47..680d10d2b 100644 --- a/docs/resources/destination_convex.md +++ b/docs/resources/destination_convex.md @@ -19,8 +19,8 @@ resource "airbyte_destination_convex" "my_destination_convex" { deployment_url = "https://murky-swan-635.convex.cloud" destination_type = "convex" } - name = "Melba Toy" - workspace_id = "8b711e5b-7fd2-4ed0-a892-1cddc692601f" + name = "Joyce Kertzmann" + workspace_id = "4c8b711e-5b7f-4d2e-9028-921cddc69260" } ``` diff --git a/docs/resources/destination_cumulio.md b/docs/resources/destination_cumulio.md index 4149a2e80..29875b578 100644 --- a/docs/resources/destination_cumulio.md +++ b/docs/resources/destination_cumulio.md @@ -20,8 +20,8 @@ resource "airbyte_destination_cumulio" "my_destination_cumulio" { api_token = "...my_api_token..." destination_type = "cumulio" } - name = "Clyde Kling" - workspace_id = "0d5f0d30-c5fb-4b25-8705-3202c73d5fe9" + name = "Ebony Predovic" + workspace_id = "6b0d5f0d-30c5-4fbb-a587-053202c73d5f" } ``` diff --git a/docs/resources/destination_databend.md b/docs/resources/destination_databend.md index ab94a68a4..7cf563b9e 100644 --- a/docs/resources/destination_databend.md +++ b/docs/resources/destination_databend.md @@ -21,10 +21,10 @@ resource "airbyte_destination_databend" "my_destination_databend" { password = "...my_password..." 
port = 443 table = "default" - username = "Lauryn.Bartoletti50" + username = "Leo.Purdy" } - name = "Gary Mayert" - workspace_id = "fe49a8d9-cbf4-4863-b323-f9b77f3a4100" + name = "Bobby Kutch V" + workspace_id = "b3fe49a8-d9cb-4f48-a333-23f9b77f3a41" } ``` diff --git a/docs/resources/destination_databricks.md b/docs/resources/destination_databricks.md index 9c3beed0d..c8014b63a 100644 --- a/docs/resources/destination_databricks.md +++ b/docs/resources/destination_databricks.md @@ -28,11 +28,11 @@ resource "airbyte_destination_databricks" "my_destination_databricks" { databricks_server_hostname = "abc-12345678-wxyz.cloud.databricks.com" destination_type = "databricks" enable_schema_evolution = true - purge_staging_data = true + purge_staging_data = false schema = "default" } - name = "Rickey Wolf" - workspace_id = "280d1ba7-7a89-4ebf-b37a-e4203ce5e6a9" + name = "Bertha Thompson" + workspace_id = "69280d1b-a77a-489e-bf73-7ae4203ce5e6" } ``` diff --git a/docs/resources/destination_dev_null.md b/docs/resources/destination_dev_null.md index f330baa7d..4d2ed7135 100644 --- a/docs/resources/destination_dev_null.md +++ b/docs/resources/destination_dev_null.md @@ -22,8 +22,8 @@ resource "airbyte_destination_dev_null" "my_destination_devnull" { } } } - name = "Kari Leannon PhD" - workspace_id = "446ce2af-7a73-4cf3-be45-3f870b326b5a" + name = "Rene Hane" + workspace_id = "a0d446ce-2af7-4a73-8f3b-e453f870b326" } ``` diff --git a/docs/resources/destination_dynamodb.md b/docs/resources/destination_dynamodb.md index 4143852fc..dc59cc20e 100644 --- a/docs/resources/destination_dynamodb.md +++ b/docs/resources/destination_dynamodb.md @@ -18,12 +18,12 @@ resource "airbyte_destination_dynamodb" "my_destination_dynamodb" { access_key_id = "A012345678910EXAMPLE" destination_type = "dynamodb" dynamodb_endpoint = "http://localhost:9000" - dynamodb_region = "ap-southeast-2" + dynamodb_region = "eu-south-1" dynamodb_table_name_prefix = "airbyte_sync" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" } - name = "Amber Dibbert" - workspace_id = "db1a8422-bb67-49d2-b227-15bf0cbb1e31" + name = "Joanna Kohler" + workspace_id = "29cdb1a8-422b-4b67-9d23-22715bf0cbb1" } ``` diff --git a/docs/resources/destination_elasticsearch.md b/docs/resources/destination_elasticsearch.md index cd8bbf495..df6faf196 100644 --- a/docs/resources/destination_elasticsearch.md +++ b/docs/resources/destination_elasticsearch.md @@ -25,10 +25,10 @@ resource "airbyte_destination_elasticsearch" "my_destination_elasticsearch" { ca_certificate = "...my_ca_certificate..." destination_type = "elasticsearch" endpoint = "...my_endpoint..." - upsert = false + upsert = true } - name = "Dr. Randolph McDermott" - workspace_id = "443a1108-e0ad-4cf4-b921-879fce953f73" + name = "Carolyn Rohan" + workspace_id = "90f3443a-1108-4e0a-9cf4-b921879fce95" } ``` diff --git a/docs/resources/destination_firebolt.md b/docs/resources/destination_firebolt.md index 9641ddae9..6a7c63d78 100644 --- a/docs/resources/destination_firebolt.md +++ b/docs/resources/destination_firebolt.md @@ -32,8 +32,8 @@ resource "airbyte_destination_firebolt" "my_destination_firebolt" { password = "...my_password..." 
username = "username@email.com" } - name = "Jermaine Kuhic" - workspace_id = "d74dd39c-0f5d-42cf-b7c7-0a45626d4368" + name = "Roman Kulas" + workspace_id = "c7abd74d-d39c-40f5-92cf-f7c70a45626d" } ``` diff --git a/docs/resources/destination_firestore.md b/docs/resources/destination_firestore.md index 2553e322a..77652908d 100644 --- a/docs/resources/destination_firestore.md +++ b/docs/resources/destination_firestore.md @@ -19,8 +19,8 @@ resource "airbyte_destination_firestore" "my_destination_firestore" { destination_type = "firestore" project_id = "...my_project_id..." } - name = "Ms. Cindy Wuckert" - workspace_id = "9f5fce6c-5561-446c-be25-0fb008c42e14" + name = "Paula Jacobs I" + workspace_id = "f16d9f5f-ce6c-4556-946c-3e250fb008c4" } ``` diff --git a/docs/resources/destination_gcs.md b/docs/resources/destination_gcs.md index 4dc728eb4..e187baa31 100644 --- a/docs/resources/destination_gcs.md +++ b/docs/resources/destination_gcs.md @@ -35,10 +35,10 @@ resource "airbyte_destination_gcs" "my_destination_gcs" { } gcs_bucket_name = "airbyte_sync" gcs_bucket_path = "data_sync/test" - gcs_bucket_region = "us-east1" + gcs_bucket_region = "us-west1" } - name = "Neal Schroeder" - workspace_id = "6c8dd6b1-4429-4074-b477-8a7bd466d28c" + name = "Miss Dennis Friesen" + workspace_id = "c366c8dd-6b14-4429-8747-4778a7bd466d" } ``` diff --git a/docs/resources/destination_google_sheets.md b/docs/resources/destination_google_sheets.md index 39781d309..c628da1fd 100644 --- a/docs/resources/destination_google_sheets.md +++ b/docs/resources/destination_google_sheets.md @@ -23,8 +23,8 @@ resource "airbyte_destination_google_sheets" "my_destination_googlesheets" { destination_type = "google-sheets" spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" } - name = "Angela Olson" - workspace_id = "cdca4251-904e-4523-87e0-bc7178e4796f" + name = "Mr. Irma Schaefer" + workspace_id = "b3cdca42-5190-44e5-a3c7-e0bc7178e479" } ``` diff --git a/docs/resources/destination_keen.md b/docs/resources/destination_keen.md index 15ca1e449..55eeac9fe 100644 --- a/docs/resources/destination_keen.md +++ b/docs/resources/destination_keen.md @@ -17,11 +17,11 @@ resource "airbyte_destination_keen" "my_destination_keen" { configuration = { api_key = "ABCDEFGHIJKLMNOPRSTUWXYZ" destination_type = "keen" - infer_timestamp = true + infer_timestamp = false project_id = "58b4acc22ba938934e888322e" } - name = "Fernando Barton" - workspace_id = "88282aa4-8256-42f2-a2e9-817ee17cbe61" + name = "Todd Oberbrunner DDS" + workspace_id = "688282aa-4825-462f-a22e-9817ee17cbe6" } ``` diff --git a/docs/resources/destination_kinesis.md b/docs/resources/destination_kinesis.md index 268801c67..b788f2018 100644 --- a/docs/resources/destination_kinesis.md +++ b/docs/resources/destination_kinesis.md @@ -16,15 +16,15 @@ DestinationKinesis Resource resource "airbyte_destination_kinesis" "my_destination_kinesis" { configuration = { access_key = "...my_access_key..." - buffer_size = 9 + buffer_size = 1 destination_type = "kinesis" endpoint = "kinesis.us‑west‑1.amazonaws.com" private_key = "...my_private_key..." 
region = "us‑west‑1" - shard_count = 4 + shard_count = 9 } - name = "Javier Price" - workspace_id = "bc0ab3c2-0c4f-4378-9fd8-71f99dd2efd1" + name = "Opal Kozey" + workspace_id = "5bc0ab3c-20c4-4f37-89fd-871f99dd2efd" } ``` diff --git a/docs/resources/destination_langchain.md b/docs/resources/destination_langchain.md index 0811ee3b9..c6dc2e93e 100644 --- a/docs/resources/destination_langchain.md +++ b/docs/resources/destination_langchain.md @@ -29,15 +29,15 @@ resource "airbyte_destination_langchain" "my_destination_langchain" { } } processing = { - chunk_overlap = 1 + chunk_overlap = 0 chunk_size = 1 text_fields = [ "...", ] } } - name = "Shannon Jacobi DVM" - workspace_id = "674bdb04-f157-4560-82d6-8ea19f1d1705" + name = "Hattie Nader" + workspace_id = "1e674bdb-04f1-4575-a082-d68ea19f1d17" } ``` diff --git a/docs/resources/destination_milvus.md b/docs/resources/destination_milvus.md new file mode 100644 index 000000000..74d5422fb --- /dev/null +++ b/docs/resources/destination_milvus.md @@ -0,0 +1,291 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "airbyte_destination_milvus Resource - terraform-provider-airbyte" +subcategory: "" +description: |- + DestinationMilvus Resource +--- + +# airbyte_destination_milvus (Resource) + +DestinationMilvus Resource + +## Example Usage + +```terraform +resource "airbyte_destination_milvus" "my_destination_milvus" { + configuration = { + destination_type = "milvus" + embedding = { + destination_milvus_embedding_cohere = { + cohere_key = "...my_cohere_key..." + mode = "cohere" + } + } + indexing = { + auth = { + destination_milvus_indexing_authentication_api_token = { + mode = "token" + token = "...my_token..." + } + } + collection = "...my_collection..." + db = "...my_db..." + host = "https://my-instance.zone.zillizcloud.com" + text_field = "...my_text_field..." + vector_field = "...my_vector_field..." + } + processing = { + chunk_overlap = 3 + chunk_size = 0 + metadata_fields = [ + "...", + ] + text_fields = [ + "...", + ] + } + } + name = "Sherry Morar IV" + workspace_id = "086a1840-394c-4260-b1f9-3f5f0642dac7" +} +``` + + +## Schema + +### Required + +- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) +- `name` (String) +- `workspace_id` (String) + +### Read-Only + +- `destination_id` (String) +- `destination_type` (String) + + +### Nested Schema for `configuration` + +Required: + +- `destination_type` (String) must be one of ["milvus"] +- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding)) +- `indexing` (Attributes) Indexing configuration (see [below for nested schema](#nestedatt--configuration--indexing)) +- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing)) + + +### Nested Schema for `configuration.embedding` + +Optional: + +- `destination_milvus_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_cohere)) +- `destination_milvus_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_fake)) +- `destination_milvus_embedding_from_field` (Attributes) Use a field in the record as the embedding. 
This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_from_field)) +- `destination_milvus_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_embedding_open_ai)) +- `destination_milvus_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_cohere)) +- `destination_milvus_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_fake)) +- `destination_milvus_update_embedding_from_field` (Attributes) Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_from_field)) +- `destination_milvus_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_milvus_update_embedding_open_ai)) + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_cohere` + +Required: + +- `cohere_key` (String) + +Optional: + +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_fake` + +Optional: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_from_field` + +Required: + +- `dimensions` (Number) The number of dimensions the embedding model is generating +- `field_name` (String) Name of the field in the record that contains the embedding + +Optional: + +- `mode` (String) must be one of ["from_field"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_embedding_open_ai` + +Required: + +- `openai_key` (String) + +Optional: + +- `mode` (String) must be one of ["openai"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_cohere` + +Required: + +- `cohere_key` (String) + +Optional: + +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_fake` + +Optional: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_from_field` + +Required: + +- `dimensions` (Number) The number of dimensions the embedding model is generating +- `field_name` (String) Name of the field in the record that contains the embedding + +Optional: + +- `mode` (String) must be one of ["from_field"] + + + +### Nested Schema for `configuration.embedding.destination_milvus_update_embedding_open_ai` + +Required: + +- `openai_key` (String) + +Optional: + +- `mode` (String) must be one of ["openai"] + + + + +### Nested Schema for `configuration.indexing` + +Required: + +- `auth` (Attributes) Authentication method (see 
[below for nested schema](#nestedatt--configuration--indexing--auth)) +- `collection` (String) The collection to load data into +- `host` (String) The public endpoint of the Milvus instance. + +Optional: + +- `db` (String) The database to connect to +- `text_field` (String) The field in the entity that contains the embedded text +- `vector_field` (String) The field in the entity that contains the vector + + +### Nested Schema for `configuration.indexing.auth` + +Optional: + +- `destination_milvus_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_api_token)) +- `destination_milvus_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_no_auth)) +- `destination_milvus_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_indexing_authentication_username_password)) +- `destination_milvus_update_indexing_authentication_api_token` (Attributes) Authenticate using an API token (suitable for Zilliz Cloud) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_api_token)) +- `destination_milvus_update_indexing_authentication_no_auth` (Attributes) Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_no_auth)) +- `destination_milvus_update_indexing_authentication_username_password` (Attributes) Authenticate using username and password (suitable for self-managed Milvus clusters) (see [below for nested schema](#nestedatt--configuration--indexing--auth--destination_milvus_update_indexing_authentication_username_password)) + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Required: + +- `token` (String) API Token for the Milvus instance + +Optional: + +- `mode` (String) must be one of ["token"] + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Optional: + +- `mode` (String) must be one of ["no_auth"] + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Required: + +- `password` (String) Password for the Milvus instance +- `username` (String) Username for the Milvus instance + +Optional: + +- `mode` (String) must be one of ["username_password"] + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Required: + +- `token` (String) API Token for the Milvus instance + +Optional: + +- `mode` (String) must be one of ["token"] + + + +### Nested Schema for `configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Optional: + +- `mode` (String) must be one of ["no_auth"] + + + +### Nested Schema for 
`configuration.indexing.auth.destination_milvus_update_indexing_authentication_username_password` + +Required: + +- `password` (String) Password for the Milvus instance +- `username` (String) Username for the Milvus instance + +Optional: + +- `mode` (String) must be one of ["username_password"] + + + + + +### Nested Schema for `configuration.processing` + +Required: + +- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) + +Optional: + +- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context +- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. +- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. + + diff --git a/docs/resources/destination_mongodb.md b/docs/resources/destination_mongodb.md index 0189013e4..472867bfe 100644 --- a/docs/resources/destination_mongodb.md +++ b/docs/resources/destination_mongodb.md @@ -19,7 +19,7 @@ resource "airbyte_destination_mongodb" "my_destination_mongodb" { destination_mongodb_authorization_type_login_password = { authorization = "login/password" password = "...my_password..." - username = "Asha61" + username = "Lucienne.Yundt" } } database = "...my_database..." @@ -36,8 +36,8 @@ resource "airbyte_destination_mongodb" "my_destination_mongodb" { } } } - name = "Ms. 
Kenneth Ledner" - workspace_id = "a1840394-c260-471f-93f5-f0642dac7af5" + name = "Robyn Schmitt I" + workspace_id = "aa63aae8-d678-464d-bb67-5fd5e60b375e" } ``` diff --git a/docs/resources/destination_mssql.md b/docs/resources/destination_mssql.md index b695d81fa..860001e3c 100644 --- a/docs/resources/destination_mssql.md +++ b/docs/resources/destination_mssql.md @@ -32,10 +32,10 @@ resource "airbyte_destination_mssql" "my_destination_mssql" { tunnel_method = "NO_TUNNEL" } } - username = "Elwyn.Sawayn7" + username = "Desiree_Yost" } - name = "Kristine Ondricka" - workspace_id = "aae8d678-64db-4b67-9fd5-e60b375ed4f6" + name = "Bert Treutel DVM" + workspace_id = "33317fe3-5b60-4eb1-aa42-6555ba3c2874" } ``` diff --git a/docs/resources/destination_mysql.md b/docs/resources/destination_mysql.md index 9b9eb397d..251fcdbc6 100644 --- a/docs/resources/destination_mysql.md +++ b/docs/resources/destination_mysql.md @@ -26,10 +26,10 @@ resource "airbyte_destination_mysql" "my_destination_mysql" { tunnel_method = "NO_TUNNEL" } } - username = "Mavis.Ullrich12" + username = "Sheldon.Smitham" } - name = "Chad Franey IV" - workspace_id = "fe35b60e-b1ea-4426-955b-a3c28744ed53" + name = "Guy Luettgen" + workspace_id = "a8d8f5c0-b2f2-4fb7-b194-a276b26916fe" } ``` diff --git a/docs/resources/destination_oracle.md b/docs/resources/destination_oracle.md index c0ff53697..7f4e8a16b 100644 --- a/docs/resources/destination_oracle.md +++ b/docs/resources/destination_oracle.md @@ -27,10 +27,10 @@ resource "airbyte_destination_oracle" "my_destination_oracle" { tunnel_method = "NO_TUNNEL" } } - username = "Jimmy.Luettgen63" + username = "Viviane_Aufderhar" } - name = "Dewey Leannon" - workspace_id = "c0b2f2fb-7b19-44a2-b6b2-6916fe1f08f4" + name = "Tammy Medhurst" + workspace_id = "3698f447-f603-4e8b-845e-80ca55efd20e" } ``` diff --git a/docs/resources/destination_pinecone.md b/docs/resources/destination_pinecone.md new file mode 100644 index 000000000..6f890911c --- /dev/null +++ b/docs/resources/destination_pinecone.md @@ -0,0 +1,170 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "airbyte_destination_pinecone Resource - terraform-provider-airbyte" +subcategory: "" +description: |- + DestinationPinecone Resource +--- + +# airbyte_destination_pinecone (Resource) + +DestinationPinecone Resource + +## Example Usage + +```terraform +resource "airbyte_destination_pinecone" "my_destination_pinecone" { + configuration = { + destination_type = "pinecone" + embedding = { + destination_pinecone_embedding_cohere = { + cohere_key = "...my_cohere_key..." + mode = "cohere" + } + } + indexing = { + index = "...my_index..." + pinecone_environment = "...my_pinecone_environment..." + pinecone_key = "...my_pinecone_key..." 
+ } + processing = { + chunk_overlap = 2 + chunk_size = 3 + metadata_fields = [ + "...", + ] + text_fields = [ + "...", + ] + } + } + name = "Cecelia Braun" + workspace_id = "8b6a89fb-e3a5-4aa8-a482-4d0ab4075088" +} +``` + + +## Schema + +### Required + +- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) +- `name` (String) +- `workspace_id` (String) + +### Read-Only + +- `destination_id` (String) +- `destination_type` (String) + + +### Nested Schema for `configuration` + +Required: + +- `destination_type` (String) must be one of ["pinecone"] +- `embedding` (Attributes) Embedding configuration (see [below for nested schema](#nestedatt--configuration--embedding)) +- `indexing` (Attributes) Pinecone is a popular vector store that can be used to store and retrieve embeddings. (see [below for nested schema](#nestedatt--configuration--indexing)) +- `processing` (Attributes) (see [below for nested schema](#nestedatt--configuration--processing)) + + +### Nested Schema for `configuration.embedding` + +Optional: + +- `destination_pinecone_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_cohere)) +- `destination_pinecone_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_fake)) +- `destination_pinecone_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_embedding_open_ai)) +- `destination_pinecone_update_embedding_cohere` (Attributes) Use the Cohere API to embed text. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_cohere)) +- `destination_pinecone_update_embedding_fake` (Attributes) Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. (see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_fake)) +- `destination_pinecone_update_embedding_open_ai` (Attributes) Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
(see [below for nested schema](#nestedatt--configuration--embedding--destination_pinecone_update_embedding_open_ai)) + + +### Nested Schema for `configuration.embedding.destination_pinecone_embedding_cohere` + +Required: + +- `cohere_key` (String) + +Optional: + +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_embedding_fake` + +Optional: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_embedding_open_ai` + +Required: + +- `openai_key` (String) + +Optional: + +- `mode` (String) must be one of ["openai"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_cohere` + +Required: + +- `cohere_key` (String) + +Optional: + +- `mode` (String) must be one of ["cohere"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_fake` + +Optional: + +- `mode` (String) must be one of ["fake"] + + + +### Nested Schema for `configuration.embedding.destination_pinecone_update_embedding_open_ai` + +Required: + +- `openai_key` (String) + +Optional: + +- `mode` (String) must be one of ["openai"] + + + + +### Nested Schema for `configuration.indexing` + +Required: + +- `index` (String) Pinecone index to use +- `pinecone_environment` (String) Pinecone environment to use +- `pinecone_key` (String) + + + +### Nested Schema for `configuration.processing` + +Required: + +- `chunk_size` (Number) Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) + +Optional: + +- `chunk_overlap` (Number) Size of overlap between chunks in tokens to store in vector store to better capture relevant context +- `metadata_fields` (List of String) List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. +- `text_fields` (List of String) List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. 
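As a hedged illustration of the `configuration.processing` schema documented above for the new vector-store destinations, the sketch below shows how the dot-notation and wildcard field paths (`user.name`, `users.*.name`) might be combined with `chunk_size` and `chunk_overlap`. Every value here is a placeholder or an assumption for illustration; only the attribute names come from the schema above.

```terraform
# Hypothetical sketch only: field paths and numeric values are assumptions,
# not taken from the generated example in this patch.
resource "airbyte_destination_pinecone" "processing_sketch" {
  configuration = {
    destination_type = "pinecone"
    embedding = {
      destination_pinecone_embedding_open_ai = {
        mode       = "openai"
        openai_key = "...my_openai_key..."
      }
    }
    indexing = {
      index                = "...my_index..."
      pinecone_environment = "...my_pinecone_environment..."
      pinecone_key         = "...my_pinecone_key..."
    }
    processing = {
      # chunk_size is required; keep it small enough for your LLM's context window.
      chunk_size    = 512
      chunk_overlap = 64
      # Dot notation targets a nested field; a wildcard targets that field in
      # every entry of an array, as described in the schema above.
      text_fields     = ["user.name"]
      metadata_fields = ["users.*.name"]
    }
  }
  name         = "...my_destination_name..."
  workspace_id = "...my_workspace_id..."
}
```

The same `processing` shape applies to the new `airbyte_destination_milvus` resource added earlier in this patch.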
+ + diff --git a/docs/resources/destination_postgres.md b/docs/resources/destination_postgres.md index ae3261f37..b75470f08 100644 --- a/docs/resources/destination_postgres.md +++ b/docs/resources/destination_postgres.md @@ -32,10 +32,10 @@ resource "airbyte_destination_postgres" "my_destination_postgres" { tunnel_method = "NO_TUNNEL" } } - username = "Laron.Gerlach40" + username = "Foster.Borer" } - name = "Isaac Wolf" - workspace_id = "7f603e8b-445e-480c-a55e-fd20e457e185" + name = "Karen Kautzer" + workspace_id = "904f3b11-94b8-4abf-a03a-79f9dfe0ab7d" } ``` diff --git a/docs/resources/destination_pubsub.md b/docs/resources/destination_pubsub.md index c4adcefb5..e4efa0825 100644 --- a/docs/resources/destination_pubsub.md +++ b/docs/resources/destination_pubsub.md @@ -15,18 +15,18 @@ DestinationPubsub Resource ```terraform resource "airbyte_destination_pubsub" "my_destination_pubsub" { configuration = { - batching_delay_threshold = 5 - batching_element_count_threshold = 8 - batching_enabled = false - batching_request_bytes_threshold = 7 + batching_delay_threshold = 7 + batching_element_count_threshold = 5 + batching_enabled = true + batching_request_bytes_threshold = 3 credentials_json = "...my_credentials_json..." destination_type = "pubsub" - ordering_enabled = false + ordering_enabled = true project_id = "...my_project_id..." topic_id = "...my_topic_id..." } - name = "Irvin Rath" - workspace_id = "a5aa8e48-24d0-4ab4-8750-88e51862065e" + name = "Phil Boyer" + workspace_id = "f86bc173-d689-4eee-9526-f8d986e881ea" } ``` diff --git a/docs/resources/destination_redis.md b/docs/resources/destination_redis.md index 02e010cc4..3392155a9 100644 --- a/docs/resources/destination_redis.md +++ b/docs/resources/destination_redis.md @@ -19,7 +19,7 @@ resource "airbyte_destination_redis" "my_destination_redis" { destination_type = "redis" host = "localhost,127.0.0.1" password = "...my_password..." - port = 6 + port = 9 ssl = false ssl_mode = { destination_redis_ssl_modes_disable = { @@ -31,10 +31,10 @@ resource "airbyte_destination_redis" "my_destination_redis" { tunnel_method = "NO_TUNNEL" } } - username = "Duncan69" + username = "Vivianne.Baumbach3" } - name = "Diane Mayer" - workspace_id = "8abf603a-79f9-4dfe-8ab7-da8a50ce187f" + name = "Bonnie Halvorson" + workspace_id = "f94e29e9-73e9-422a-97a1-5be3e060807e" } ``` diff --git a/docs/resources/destination_redshift.md b/docs/resources/destination_redshift.md index b46904168..cf6d52877 100644 --- a/docs/resources/destination_redshift.md +++ b/docs/resources/destination_redshift.md @@ -37,19 +37,19 @@ resource "airbyte_destination_redshift" "my_destination_redshift" { } } file_buffer_count = 10 - file_name_pattern = "{part_number}" + file_name_pattern = "{timestamp}" method = "S3 Staging" - purge_staging_data = true + purge_staging_data = false s3_bucket_name = "airbyte.staging" s3_bucket_path = "data_sync/test" - s3_bucket_region = "us-east-2" + s3_bucket_region = "us-west-2" secret_access_key = "...my_secret_access_key..." } } - username = "Jairo.Farrell53" + username = "Margarette_Rau" } - name = "Frankie Torphy" - workspace_id = "526f8d98-6e88-41ea-94f0-e1012563f94e" + name = "Mrs. 
Geraldine Zulauf" + workspace_id = "7a60ff2a-54a3-41e9-8764-a3e865e7956f" } ``` diff --git a/docs/resources/destination_s3.md b/docs/resources/destination_s3.md index 007363a46..38141cdd1 100644 --- a/docs/resources/destination_s3.md +++ b/docs/resources/destination_s3.md @@ -17,7 +17,7 @@ resource "airbyte_destination_s3" "my_destination_s3" { configuration = { access_key_id = "A012345678910EXAMPLE" destination_type = "s3" - file_name_pattern = "{date}" + file_name_pattern = "{timestamp}" format = { destination_s3_output_format_avro_apache_avro = { compression_codec = { @@ -30,13 +30,13 @@ resource "airbyte_destination_s3" "my_destination_s3" { } s3_bucket_name = "airbyte_sync" s3_bucket_path = "data_sync/test" - s3_bucket_region = "cn-northwest-1" + s3_bucket_region = "us-west-1" s3_endpoint = "http://localhost:9000" s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" } - name = "Freddie King" - workspace_id = "922a57a1-5be3-4e06-8807-e2b6e3ab8845" + name = "Joyce O'Kon" + workspace_id = "9da660ff-57bf-4aad-8f9e-fc1b4512c103" } ``` diff --git a/docs/resources/destination_s3_glue.md b/docs/resources/destination_s3_glue.md index 3a2fbe948..02f7619d4 100644 --- a/docs/resources/destination_s3_glue.md +++ b/docs/resources/destination_s3_glue.md @@ -17,7 +17,7 @@ resource "airbyte_destination_s3_glue" "my_destination_s3glue" { configuration = { access_key_id = "A012345678910EXAMPLE" destination_type = "s3-glue" - file_name_pattern = "{sync_id}" + file_name_pattern = "{date}" format = { destination_s3_glue_output_format_json_lines_newline_delimited_json = { compression = { @@ -33,13 +33,13 @@ resource "airbyte_destination_s3_glue" "my_destination_s3glue" { glue_serialization_library = "org.openx.data.jsonserde.JsonSerDe" s3_bucket_name = "airbyte_sync" s3_bucket_path = "data_sync/test" - s3_bucket_region = "cn-northwest-1" + s3_bucket_region = "ca-central-1" s3_endpoint = "http://localhost:9000" s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" } - name = "Dr. Iris Hodkiewicz" - workspace_id = "2a54a31e-9476-44a3-a865-e7956f9251a5" + name = "Edmund Daugherty" + workspace_id = "15199ebf-d0e9-4fe6-8632-ca3aed011799" } ``` diff --git a/docs/resources/destination_sftp_json.md b/docs/resources/destination_sftp_json.md index 99fae4591..a7e3c7f07 100644 --- a/docs/resources/destination_sftp_json.md +++ b/docs/resources/destination_sftp_json.md @@ -20,10 +20,10 @@ resource "airbyte_destination_sftp_json" "my_destination_sftpjson" { host = "...my_host..." password = "...my_password..." port = 22 - username = "Lane.Schuster" + username = "Dayton98" } - name = "Ruth Zulauf" - workspace_id = "7bfaad4f-9efc-41b4-912c-1032648dc2f6" + name = "Terence Beer" + workspace_id = "71778ff6-1d01-4747-a360-a15db6a66065" } ``` diff --git a/docs/resources/destination_snowflake.md b/docs/resources/destination_snowflake.md index 8218e081d..53a90329c 100644 --- a/docs/resources/destination_snowflake.md +++ b/docs/resources/destination_snowflake.md @@ -24,17 +24,16 @@ resource "airbyte_destination_snowflake" "my_destination_snowflake" { } database = "AIRBYTE_DATABASE" destination_type = "snowflake" - host = "accountname.us-east-2.aws.snowflakecomputing.com" + host = "accountname.snowflakecomputing.com" jdbc_url_params = "...my_jdbc_url_params..." raw_data_schema = "...my_raw_data_schema..." 
role = "AIRBYTE_ROLE" schema = "AIRBYTE_SCHEMA" - use_1s1t_format = true username = "AIRBYTE_USER" warehouse = "AIRBYTE_WAREHOUSE" } - name = "Dr. Terrell Stanton" - workspace_id = "fe6c632c-a3ae-4d01-9799-6312fde04771" + name = "Shaun Osinski" + workspace_id = "851d6c64-5b08-4b61-891b-aa0fe1ade008" } ``` @@ -69,8 +68,7 @@ Optional: - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) - `jdbc_url_params` (String) Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 -- `raw_data_schema` (String) (Beta) The schema to write raw tables into -- `use_1s1t_format` (Boolean) (Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program. +- `raw_data_schema` (String) The schema to write raw tables into ### Nested Schema for `configuration.credentials` diff --git a/docs/resources/destination_timeplus.md b/docs/resources/destination_timeplus.md index e7adcf6a3..30302c1a5 100644 --- a/docs/resources/destination_timeplus.md +++ b/docs/resources/destination_timeplus.md @@ -19,8 +19,8 @@ resource "airbyte_destination_timeplus" "my_destination_timeplus" { destination_type = "timeplus" endpoint = "https://us.timeplus.cloud/workspace_id" } - name = "Tamara Lang" - workspace_id = "61d01747-6360-4a15-9b6a-660659a1adea" + name = "Ruben Williamson" + workspace_id = "5f350d8c-db5a-4341-8143-010421813d52" } ``` diff --git a/docs/resources/destination_typesense.md b/docs/resources/destination_typesense.md index 143c4212e..d6f7345d8 100644 --- a/docs/resources/destination_typesense.md +++ b/docs/resources/destination_typesense.md @@ -16,14 +16,14 @@ DestinationTypesense Resource resource "airbyte_destination_typesense" "my_destination_typesense" { configuration = { api_key = "...my_api_key..." - batch_size = "...my_batch_size..." + batch_size = 0 destination_type = "typesense" host = "...my_host..." port = "...my_port..." protocol = "...my_protocol..." } - name = "Wm Hane" - workspace_id = "1d6c645b-08b6-4189-9baa-0fe1ade008e6" + name = "Conrad Rutherford" + workspace_id = "e253b668-451c-46c6-a205-e16deab3fec9" } ``` @@ -52,7 +52,7 @@ Required: Optional: -- `batch_size` (String) How many documents should be imported together. Default 1000 +- `batch_size` (Number) How many documents should be imported together. Default 1000 - `port` (String) Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443 - `protocol` (String) Protocol of the Typesense instance. Ex: http or https. 
Default is https diff --git a/docs/resources/destination_vertica.md b/docs/resources/destination_vertica.md index accc415de..f4974a20b 100644 --- a/docs/resources/destination_vertica.md +++ b/docs/resources/destination_vertica.md @@ -27,10 +27,10 @@ resource "airbyte_destination_vertica" "my_destination_vertica" { tunnel_method = "NO_TUNNEL" } } - username = "Judge_Schinner" + username = "Jackson.Kuvalis" } - name = "Marion Aufderhar" - workspace_id = "cdb5a341-8143-4010-8218-13d5208ece7e" + name = "Ida Lubowitz" + workspace_id = "73a8418d-1623-409f-b092-9921aefb9f58" } ``` diff --git a/docs/resources/destination_xata.md b/docs/resources/destination_xata.md index 4de513ffb..a05ab3edd 100644 --- a/docs/resources/destination_xata.md +++ b/docs/resources/destination_xata.md @@ -19,8 +19,8 @@ resource "airbyte_destination_xata" "my_destination_xata" { db_url = "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main" destination_type = "xata" } - name = "Sally Dooley" - workspace_id = "68451c6c-6e20-45e1-adea-b3fec9578a64" + name = "Oscar Smith" + workspace_id = "e68e4be0-5601-43f5-9da7-57a59ecfef66" } ``` diff --git a/docs/resources/source_aha.md b/docs/resources/source_aha.md index b8068102b..df1b71d29 100644 --- a/docs/resources/source_aha.md +++ b/docs/resources/source_aha.md @@ -19,9 +19,9 @@ resource "airbyte_source_aha" "my_source_aha" { source_type = "aha" url = "...my_url..." } - name = "Brandy Gibson" + name = "Van Bergnaum" secret_id = "...my_secret_id..." - workspace_id = "3a8418d1-6230-49fb-8929-921aefb9f58c" + workspace_id = "a3383c2b-eb47-4737-bc8d-72f64d1db1f2" } ``` diff --git a/docs/resources/source_aircall.md b/docs/resources/source_aircall.md index 6565c4dc6..8576bc2f6 100644 --- a/docs/resources/source_aircall.md +++ b/docs/resources/source_aircall.md @@ -20,9 +20,9 @@ resource "airbyte_source_aircall" "my_source_aircall" { source_type = "aircall" start_date = "2022-03-01T00:00:00.000Z" } - name = "Leslie Waters" + name = "Martha Bashirian" secret_id = "...my_secret_id..." - workspace_id = "e4be0560-13f5-49da-b57a-59ecfef66ef1" + workspace_id = "1e96349e-1cf9-4e06-a3a4-37000ae6b6bc" } ``` diff --git a/docs/resources/source_airtable.md b/docs/resources/source_airtable.md index 965692dae..2e257fa5e 100644 --- a/docs/resources/source_airtable.md +++ b/docs/resources/source_airtable.md @@ -22,14 +22,14 @@ resource "airbyte_source_airtable" "my_source_airtable" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2020-12-11T09:39:15.481Z" + token_expiry_date = "2021-08-01T09:41:55.270Z" } } source_type = "airtable" } - name = "Vincent Frami" + name = "Tommie Klocko" secret_id = "...my_secret_id..." - workspace_id = "c2beb477-373c-48d7-af64-d1db1f2c4310" + workspace_id = "eac55a97-41d3-4113-9296-5bb8a7202611" } ``` diff --git a/docs/resources/source_alloydb.md b/docs/resources/source_alloydb.md index 2663e4326..4d3d54abc 100644 --- a/docs/resources/source_alloydb.md +++ b/docs/resources/source_alloydb.md @@ -22,12 +22,12 @@ resource "airbyte_source_alloydb" "my_source_alloydb" { port = 5432 replication_method = { source_alloydb_replication_method_logical_replication_cdc_ = { - initial_waiting_seconds = 4 + initial_waiting_seconds = 2 lsn_commit_behaviour = "While reading Data" method = "CDC" plugin = "pgoutput" publication = "...my_publication..." - queue_size = 9 + queue_size = 10 replication_slot = "...my_replication_slot..." 
} } @@ -45,11 +45,11 @@ resource "airbyte_source_alloydb" "my_source_alloydb" { tunnel_method = "NO_TUNNEL" } } - username = "Keon28" + username = "Ashlynn_Emard" } - name = "Merle Carroll" + name = "Wilbert Crona" secret_id = "...my_secret_id..." - workspace_id = "9e06e3a4-3700-40ae-ab6b-c9b8f759eac5" + workspace_id = "9b1abda8-c070-4e10-84cb-0672d1ad879e" } ``` diff --git a/docs/resources/source_amazon_ads.md b/docs/resources/source_amazon_ads.md index 21b28c018..9313194aa 100644 --- a/docs/resources/source_amazon_ads.md +++ b/docs/resources/source_amazon_ads.md @@ -18,14 +18,17 @@ resource "airbyte_source_amazon_ads" "my_source_amazonads" { auth_type = "oauth2.0" client_id = "...my_client_id..." client_secret = "...my_client_secret..." - look_back_window = 3 + look_back_window = 10 + marketplace_ids = [ + "...", + ] profiles = [ 6, ] refresh_token = "...my_refresh_token..." region = "EU" report_record_types = [ - "asins_keywords", + "asins_targets", ] source_type = "amazon-ads" start_date = "2022-10-10" @@ -33,9 +36,9 @@ resource "airbyte_source_amazon_ads" "my_source_amazonads" { "archived", ] } - name = "Evelyn Bode" + name = "Dan Towne" secret_id = "...my_secret_id..." - workspace_id = "2965bb8a-7202-4611-835e-139dbc2259b1" + workspace_id = "d02bae0b-e2d7-4822-99e3-ea4b5197f924" } ``` @@ -71,7 +74,8 @@ Optional: - `auth_type` (String) must be one of ["oauth2.0"] - `look_back_window` (Number) The amount of days to go back in time to get the updated data from Amazon Ads -- `profiles` (List of Number) Profile IDs you want to fetch data for. See docs for more details. +- `marketplace_ids` (List of String) Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. +- `profiles` (List of Number) Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. - `region` (String) must be one of ["NA", "EU", "FE"] Region to pull data from (EU/NA/FE). See docs for more details. - `report_record_types` (List of String) Optional configuration which accepts an array of string of record types. Leave blank for default behaviour to pull all report types. Use this config option only if you want to pull specific report type(s). See docs for more details diff --git a/docs/resources/source_amazon_seller_partner.md b/docs/resources/source_amazon_seller_partner.md index 82f0273f0..2a1f4205f 100644 --- a/docs/resources/source_amazon_seller_partner.md +++ b/docs/resources/source_amazon_seller_partner.md @@ -15,26 +15,26 @@ SourceAmazonSellerPartner Resource ```terraform resource "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" { configuration = { - advanced_stream_options = "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}" + advanced_stream_options = "{\"GET_SALES_AND_TRAFFIC_REPORT\": {\"availability_sla_days\": 3}}" auth_type = "oauth2.0" aws_access_key = "...my_aws_access_key..." - aws_environment = "SANDBOX" + aws_environment = "PRODUCTION" aws_secret_key = "...my_aws_secret_key..." lwa_app_id = "...my_lwa_app_id..." lwa_client_secret = "...my_lwa_client_secret..." max_wait_seconds = 1980 - period_in_days = 6 + period_in_days = 5 refresh_token = "...my_refresh_token..." 
- region = "SE" + region = "SA" replication_end_date = "2017-01-25T00:00:00Z" replication_start_date = "2017-01-25T00:00:00Z" - report_options = "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"}}" + report_options = "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}" role_arn = "...my_role_arn..." source_type = "amazon-seller-partner" } - name = "Mr. Angela Volkman" + name = "Phyllis Quitzon" secret_id = "...my_secret_id..." - workspace_id = "4cb0672d-1ad8-479e-ab96-65b85efbd02b" + workspace_id = "5c537c64-54ef-4b0b-b489-6c3ca5acfbe2" } ``` diff --git a/docs/resources/source_amazon_sqs.md b/docs/resources/source_amazon_sqs.md index 77d204d20..7ea4bb73b 100644 --- a/docs/resources/source_amazon_sqs.md +++ b/docs/resources/source_amazon_sqs.md @@ -17,18 +17,18 @@ resource "airbyte_source_amazon_sqs" "my_source_amazonsqs" { configuration = { access_key = "xxxxxHRNxxx3TBxxxxxx" attributes_to_return = "attr1,attr2" - delete_messages = true + delete_messages = false max_batch_size = 5 max_wait_time = 5 queue_url = "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" - region = "eu-south-1" + region = "ap-southeast-2" secret_key = "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" source_type = "amazon-sqs" visibility_timeout = 15 } - name = "Rosemarie Kub" + name = "Cathy Kirlin" secret_id = "...my_secret_id..." - workspace_id = "259e3ea4-b519-47f9-a443-da7ce52b895c" + workspace_id = "29177dea-c646-4ecb-9734-09e3eb1e5a2b" } ``` diff --git a/docs/resources/source_amplitude.md b/docs/resources/source_amplitude.md index 2788958e9..afa37370c 100644 --- a/docs/resources/source_amplitude.md +++ b/docs/resources/source_amplitude.md @@ -17,14 +17,14 @@ resource "airbyte_source_amplitude" "my_source_amplitude" { configuration = { api_key = "...my_api_key..." data_region = "Standard Server" - request_time_range = 2 + request_time_range = 1 secret_key = "...my_secret_key..." source_type = "amplitude" start_date = "2021-01-25T00:00:00Z" } - name = "Sadie Kemmer" + name = "Robin Bednar" secret_id = "...my_secret_id..." - workspace_id = "4efb0b34-896c-43ca-9acf-be2fd5707577" + workspace_id = "116db995-45fc-495f-a889-70e189dbb30f" } ``` diff --git a/docs/resources/source_apify_dataset.md b/docs/resources/source_apify_dataset.md index bb2b2336b..dc02b94ef 100644 --- a/docs/resources/source_apify_dataset.md +++ b/docs/resources/source_apify_dataset.md @@ -18,10 +18,11 @@ resource "airbyte_source_apify_dataset" "my_source_apifydataset" { clean = true dataset_id = "...my_dataset_id..." source_type = "apify-dataset" + token = "Personal API tokens" } - name = "Natasha Bogan" + name = "Dale Ferry" secret_id = "...my_secret_id..." - workspace_id = "deac646e-cb57-4340-9e3e-b1e5a2b12eb0" + workspace_id = "055b197c-d44e-42f5-ad82-d3513bb6f48b" } ``` @@ -48,11 +49,12 @@ resource "airbyte_source_apify_dataset" "my_source_apifydataset" { Required: -- `dataset_id` (String) ID of the dataset you would like to load to Airbyte. - `source_type` (String) must be one of ["apify-dataset"] +- `token` (String) Your application's Client Secret. You can find this value on the console integrations tab after you login. Optional: - `clean` (Boolean) If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false. +- `dataset_id` (String) ID of the dataset you would like to load to Airbyte. 
diff --git a/docs/resources/source_appfollow.md b/docs/resources/source_appfollow.md index 3e78505cc..7f6e36ef9 100644 --- a/docs/resources/source_appfollow.md +++ b/docs/resources/source_appfollow.md @@ -18,9 +18,9 @@ resource "airbyte_source_appfollow" "my_source_appfollow" { api_secret = "...my_api_secret..." source_type = "appfollow" } - name = "Mrs. Elisa Bogisich" + name = "Regina Huel" secret_id = "...my_secret_id..." - workspace_id = "b99545fc-95fa-4889-b0e1-89dbb30fcb33" + workspace_id = "db35ff2e-4b27-4537-a8cd-9e7319c177d5" } ``` diff --git a/docs/resources/source_asana.md b/docs/resources/source_asana.md index 94e6fe159..63b256530 100644 --- a/docs/resources/source_asana.md +++ b/docs/resources/source_asana.md @@ -25,9 +25,9 @@ resource "airbyte_source_asana" "my_source_asana" { } source_type = "asana" } - name = "Doug Baumbach" + name = "Jill Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "b197cd44-e2f5-42d8-ad35-13bb6f48b656" + workspace_id = "b114eeb5-2ff7-485f-8378-14d4c98e0c2b" } ``` diff --git a/docs/resources/source_auth0.md b/docs/resources/source_auth0.md index 0b3371c42..52fc91890 100644 --- a/docs/resources/source_auth0.md +++ b/docs/resources/source_auth0.md @@ -23,10 +23,11 @@ resource "airbyte_source_auth0" "my_source_auth0" { } } source_type = "auth0" + start_date = "2023-08-05T00:43:59.244Z" } - name = "Roosevelt Schultz" + name = "Willard McLaughlin" secret_id = "...my_secret_id..." - workspace_id = "5ff2e4b2-7537-4a8c-99e7-319c177d525f" + workspace_id = "75dad636-c600-4503-98bb-31180f739ae9" } ``` @@ -57,6 +58,10 @@ Required: - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) - `source_type` (String) must be one of ["auth0"] +Optional: + +- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. + ### Nested Schema for `configuration.credentials` diff --git a/docs/resources/source_aws_cloudtrail.md b/docs/resources/source_aws_cloudtrail.md index 337074b23..ae5eda7dc 100644 --- a/docs/resources/source_aws_cloudtrail.md +++ b/docs/resources/source_aws_cloudtrail.md @@ -21,9 +21,9 @@ resource "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" { source_type = "aws-cloudtrail" start_date = "2021-01-01" } - name = "Gregory Bernhard" + name = "Nellie Waters" secret_id = "...my_secret_id..." - workspace_id = "eb52ff78-5fc3-4781-8d4c-98e0c2bb89eb" + workspace_id = "09e28103-31f3-4981-94c7-00b607f3c93c" } ``` diff --git a/docs/resources/source_azure_blob_storage.md b/docs/resources/source_azure_blob_storage.md index 625762ac0..a6896cfbb 100644 --- a/docs/resources/source_azure_blob_storage.md +++ b/docs/resources/source_azure_blob_storage.md @@ -28,9 +28,9 @@ resource "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" { } source_type = "azure-blob-storage" } - name = "Cristina Murphy" + name = "Patty Mraz" secret_id = "...my_secret_id..." - workspace_id = "36c60050-3d8b-4b31-980f-739ae9e057eb" + workspace_id = "3f2ceda7-e23f-4225-b411-faf4b7544e47" } ``` diff --git a/docs/resources/source_azure_table.md b/docs/resources/source_azure_table.md index b73d07e42..608c2aab8 100644 --- a/docs/resources/source_azure_table.md +++ b/docs/resources/source_azure_table.md @@ -18,11 +18,11 @@ resource "airbyte_source_azure_table" "my_source_azuretable" { source_type = "azure-table" storage_access_key = "...my_storage_access_key..." storage_account_name = "...my_storage_account_name..." 
- storage_endpoint_suffix = "core.chinacloudapi.cn" + storage_endpoint_suffix = "core.windows.net" } - name = "Bobbie Thompson" + name = "Ian Baumbach" secret_id = "...my_secret_id..." - workspace_id = "10331f39-81d4-4c70-8b60-7f3c93c73b9d" + workspace_id = "57a5b404-63a7-4d57-9f14-00e764ad7334" } ``` diff --git a/docs/resources/source_bamboo_hr.md b/docs/resources/source_bamboo_hr.md index 910edc75a..7e8bce413 100644 --- a/docs/resources/source_bamboo_hr.md +++ b/docs/resources/source_bamboo_hr.md @@ -21,9 +21,9 @@ resource "airbyte_source_bamboo_hr" "my_source_bamboohr" { source_type = "bamboo-hr" subdomain = "...my_subdomain..." } - name = "Mandy Collier" + name = "Ralph Rau" secret_id = "...my_secret_id..." - workspace_id = "da7e23f2-2574-411f-af4b-7544e472e802" + workspace_id = "1b36a080-88d1-400e-bada-200ef0422eb2" } ``` diff --git a/docs/resources/source_bigcommerce.md b/docs/resources/source_bigcommerce.md index 4c74c2e70..e850994f9 100644 --- a/docs/resources/source_bigcommerce.md +++ b/docs/resources/source_bigcommerce.md @@ -20,9 +20,9 @@ resource "airbyte_source_bigcommerce" "my_source_bigcommerce" { start_date = "2021-01-01" store_hash = "...my_store_hash..." } - name = "Bill Kling" + name = "Beth Gleason" secret_id = "...my_secret_id..." - workspace_id = "b40463a7-d575-4f14-80e7-64ad7334ec1b" + workspace_id = "9ab8366c-723f-4fda-9e06-bee4825c1fc0" } ``` diff --git a/docs/resources/source_bigquery.md b/docs/resources/source_bigquery.md index a7235ba95..5d4224fa6 100644 --- a/docs/resources/source_bigquery.md +++ b/docs/resources/source_bigquery.md @@ -20,9 +20,9 @@ resource "airbyte_source_bigquery" "my_source_bigquery" { project_id = "...my_project_id..." source_type = "bigquery" } - name = "Tracey Bosco" + name = "Joe Bradtke" secret_id = "...my_secret_id..." - workspace_id = "6a08088d-100e-4fad-a200-ef0422eb2164" + workspace_id = "80bff918-544e-4c42-9efc-ce8f1977773e" } ``` diff --git a/docs/resources/source_bing_ads.md b/docs/resources/source_bing_ads.md index e619830eb..c209d01ae 100644 --- a/docs/resources/source_bing_ads.md +++ b/docs/resources/source_bing_ads.md @@ -19,15 +19,15 @@ resource "airbyte_source_bing_ads" "my_source_bingads" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." developer_token = "...my_developer_token..." - lookback_window = 8 + lookback_window = 4 refresh_token = "...my_refresh_token..." - reports_start_date = "2021-04-24" + reports_start_date = "2022-08-23" source_type = "bing-ads" tenant_id = "...my_tenant_id..." } - name = "Randolph Lebsack" + name = "Kathryn Nitzsche" secret_id = "...my_secret_id..." - workspace_id = "6c723ffd-a9e0-46be-a482-5c1fc0e115c8" + workspace_id = "408f05e3-d48f-4daf-b13a-1f5fd94259c0" } ``` diff --git a/docs/resources/source_braintree.md b/docs/resources/source_braintree.md index 51bc9de91..e4b899ddb 100644 --- a/docs/resources/source_braintree.md +++ b/docs/resources/source_braintree.md @@ -15,16 +15,16 @@ SourceBraintree Resource ```terraform resource "airbyte_source_braintree" "my_source_braintree" { configuration = { - environment = "Qa" + environment = "Development" merchant_id = "...my_merchant_id..." private_key = "...my_private_key..." public_key = "...my_public_key..." source_type = "braintree" - start_date = "2020-11-22 20:20:05" + start_date = "2020-12-30" } - name = "Olga Hermiston" + name = "Henrietta Nienow" secret_id = "...my_secret_id..." 
- workspace_id = "ec42defc-ce8f-4197-b773-e63562a7b408" + workspace_id = "4f3b756c-11f6-4c37-a512-6243835bbc05" } ``` diff --git a/docs/resources/source_braze.md b/docs/resources/source_braze.md index 04277f525..9b54f5633 100644 --- a/docs/resources/source_braze.md +++ b/docs/resources/source_braze.md @@ -17,12 +17,12 @@ resource "airbyte_source_braze" "my_source_braze" { configuration = { api_key = "...my_api_key..." source_type = "braze" - start_date = "2022-11-23" + start_date = "2022-09-06" url = "...my_url..." } - name = "Tricia DuBuque" + name = "Rosie Glover" secret_id = "...my_secret_id..." - workspace_id = "8fdaf313-a1f5-4fd9-8259-c0b36f25ea94" + workspace_id = "efc5fde1-0a0c-4e21-a9e5-10019c6dc5e3" } ``` diff --git a/docs/resources/source_chargebee.md b/docs/resources/source_chargebee.md index 34b6a217d..441d8aec3 100644 --- a/docs/resources/source_chargebee.md +++ b/docs/resources/source_chargebee.md @@ -21,9 +21,9 @@ resource "airbyte_source_chargebee" "my_source_chargebee" { source_type = "chargebee" start_date = "2021-01-25T00:00:00Z" } - name = "Harvey Harber" + name = "Viola Morissette" secret_id = "...my_secret_id..." - workspace_id = "11f6c37a-5126-4243-835b-bc05a23a45ce" + workspace_id = "fbbe6949-fb2b-4b4e-8ae6-c3d5db3adebd" } ``` diff --git a/docs/resources/source_chartmogul.md b/docs/resources/source_chartmogul.md index 5b1b4c645..eae9586b4 100644 --- a/docs/resources/source_chartmogul.md +++ b/docs/resources/source_chartmogul.md @@ -16,13 +16,13 @@ SourceChartmogul Resource resource "airbyte_source_chartmogul" "my_source_chartmogul" { configuration = { api_key = "...my_api_key..." - interval = "quarter" + interval = "week" source_type = "chartmogul" start_date = "2017-01-25T00:00:00Z" } - name = "Mr. Kristopher Torphy" + name = "Neal Gorczany" secret_id = "...my_secret_id..." - workspace_id = "0ce2169e-5100-419c-adc5-e34762799bfb" + workspace_id = "06a8aa94-c026-444c-b5e9-d9a4578adc1a" } ``` diff --git a/docs/resources/source_clickhouse.md b/docs/resources/source_clickhouse.md index e528438b8..0752f6a3e 100644 --- a/docs/resources/source_clickhouse.md +++ b/docs/resources/source_clickhouse.md @@ -25,11 +25,11 @@ resource "airbyte_source_clickhouse" "my_source_clickhouse" { tunnel_method = "NO_TUNNEL" } } - username = "Toni.Hudson" + username = "Gerry81" } - name = "Irvin Rippin" + name = "Mr. Simon Altenwerth" secret_id = "...my_secret_id..." - workspace_id = "b4ecae6c-3d5d-4b3a-9ebd-5daea4c506a8" + workspace_id = "c802e2ec-09ff-48f0-b816-ff3477c13e90" } ``` diff --git a/docs/resources/source_clickup_api.md b/docs/resources/source_clickup_api.md index 5c87bdcd9..3fe26b280 100644 --- a/docs/resources/source_clickup_api.md +++ b/docs/resources/source_clickup_api.md @@ -17,15 +17,15 @@ resource "airbyte_source_clickup_api" "my_source_clickupapi" { configuration = { api_token = "...my_api_token..." folder_id = "...my_folder_id..." - include_closed_tasks = false + include_closed_tasks = true list_id = "...my_list_id..." source_type = "clickup-api" space_id = "...my_space_id..." team_id = "...my_team_id..." } - name = "Sergio Grant Sr." + name = "Mr. Jack Gottlieb" secret_id = "...my_secret_id..." 
- workspace_id = "644cf5e9-d9a4-4578-adc1-ac600dec001a" + workspace_id = "b0960a66-8151-4a47-aaf9-23c5949f83f3" } ``` diff --git a/docs/resources/source_clockify.md b/docs/resources/source_clockify.md index 9ca1a9257..113eb7b03 100644 --- a/docs/resources/source_clockify.md +++ b/docs/resources/source_clockify.md @@ -20,9 +20,9 @@ resource "airbyte_source_clockify" "my_source_clockify" { source_type = "clockify" workspace_id = "...my_workspace_id..." } - name = "Guy Beier" + name = "Angela Schaefer" secret_id = "...my_secret_id..." - workspace_id = "2ec09ff8-f0f8-416f-b347-7c13e902c141" + workspace_id = "76ffb901-c6ec-4bb4-a243-cf789ffafeda" } ``` diff --git a/docs/resources/source_close_com.md b/docs/resources/source_close_com.md index a8fbf2e89..b28cb6c3a 100644 --- a/docs/resources/source_close_com.md +++ b/docs/resources/source_close_com.md @@ -19,9 +19,9 @@ resource "airbyte_source_close_com" "my_source_closecom" { source_type = "close-com" start_date = "2021-01-01" } - name = "Robert Muller MD" + name = "Ronnie Nikolaus" secret_id = "...my_secret_id..." - workspace_id = "668151a4-72af-4923-8594-9f83f350cf87" + workspace_id = "e0ac184c-2b9c-4247-8883-73a40e1942f3" } ``` diff --git a/docs/resources/source_coda.md b/docs/resources/source_coda.md index cb3289641..cc909b5dd 100644 --- a/docs/resources/source_coda.md +++ b/docs/resources/source_coda.md @@ -18,9 +18,9 @@ resource "airbyte_source_coda" "my_source_coda" { auth_token = "...my_auth_token..." source_type = "coda" } - name = "Nettie Wilkinson" + name = "Lila Harris II" secret_id = "...my_secret_id..." - workspace_id = "01c6ecbb-4e24-43cf-b89f-fafeda53e5ae" + workspace_id = "5756f5d5-6d0b-4d0a-b2df-e13db4f62cba" } ``` diff --git a/docs/resources/source_coin_api.md b/docs/resources/source_coin_api.md index 1e6af0c4d..3bd2f51bf 100644 --- a/docs/resources/source_coin_api.md +++ b/docs/resources/source_coin_api.md @@ -19,14 +19,14 @@ resource "airbyte_source_coin_api" "my_source_coinapi" { end_date = "2019-01-01T00:00:00" environment = "sandbox" limit = 10 - period = "5SEC" + period = "2MTH" source_type = "coin-api" start_date = "2019-01-01T00:00:00" symbol_id = "...my_symbol_id..." } - name = "Myron Boyle" + name = "Francis Boyle" secret_id = "...my_secret_id..." - workspace_id = "c2b9c247-c883-473a-80e1-942f32e55055" + workspace_id = "bc0b80a6-924d-43b2-acfc-c8f895010f5d" } ``` diff --git a/docs/resources/source_coinmarketcap.md b/docs/resources/source_coinmarketcap.md index 44e5cb790..c2cd2f263 100644 --- a/docs/resources/source_coinmarketcap.md +++ b/docs/resources/source_coinmarketcap.md @@ -16,15 +16,15 @@ SourceCoinmarketcap Resource resource "airbyte_source_coinmarketcap" "my_source_coinmarketcap" { configuration = { api_key = "...my_api_key..." - data_type = "latest" + data_type = "historical" source_type = "coinmarketcap" symbols = [ "...", ] } - name = "Elsie West" + name = "Meredith Kassulke" secret_id = "...my_secret_id..." - workspace_id = "56d0bd0a-f2df-4e13-9b4f-62cba3f8941a" + workspace_id = "1804e54c-82f1-468a-b63c-8873e484380b" } ``` diff --git a/docs/resources/source_configcat.md b/docs/resources/source_configcat.md index 4fa145d55..0664fac49 100644 --- a/docs/resources/source_configcat.md +++ b/docs/resources/source_configcat.md @@ -17,11 +17,11 @@ resource "airbyte_source_configcat" "my_source_configcat" { configuration = { password = "...my_password..." 
source_type = "configcat" - username = "Shaylee.Pfeffer71" + username = "Art_Wiegand" } - name = "Timothy Nolan" + name = "Lowell Oberbrunner" secret_id = "...my_secret_id..." - workspace_id = "24d3b2ec-fcc8-4f89-9010-f5dd3d6fa180" + workspace_id = "5a60a04c-495c-4c69-9171-b51c1bdb1cf4" } ``` diff --git a/docs/resources/source_confluence.md b/docs/resources/source_confluence.md index 9c15ab930..65945d69a 100644 --- a/docs/resources/source_confluence.md +++ b/docs/resources/source_confluence.md @@ -20,9 +20,9 @@ resource "airbyte_source_confluence" "my_source_confluence" { email = "abc@example.com" source_type = "confluence" } - name = "Ms. Russell Wunsch" + name = "Jody Will" secret_id = "...my_secret_id..." - workspace_id = "a363c887-3e48-4438-8b1f-6b8ca275a60a" + workspace_id = "ccca99bc-7fc0-4b2d-8e10-873e42b006d6" } ``` diff --git a/docs/resources/source_convex.md b/docs/resources/source_convex.md index 23decab52..107ac4ea7 100644 --- a/docs/resources/source_convex.md +++ b/docs/resources/source_convex.md @@ -19,9 +19,9 @@ resource "airbyte_source_convex" "my_source_convex" { deployment_url = "https://murky-swan-635.convex.cloud" source_type = "convex" } - name = "Jody Gutmann" + name = "Guy Kovacek" secret_id = "...my_secret_id..." - workspace_id = "cc699171-b51c-41bd-b1cf-4b888ebdfc4c" + workspace_id = "a8581a58-208c-454f-afa9-c95f2eac5565" } ``` diff --git a/docs/resources/source_datadog.md b/docs/resources/source_datadog.md deleted file mode 100644 index b8fea1a05..000000000 --- a/docs/resources/source_datadog.md +++ /dev/null @@ -1,87 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "airbyte_source_datadog Resource - terraform-provider-airbyte" -subcategory: "" -description: |- - SourceDatadog Resource ---- - -# airbyte_source_datadog (Resource) - -SourceDatadog Resource - -## Example Usage - -```terraform -resource "airbyte_source_datadog" "my_source_datadog" { - configuration = { - api_key = "...my_api_key..." - application_key = "...my_application_key..." - end_date = "2022-10-01T00:00:00Z" - max_records_per_request = 8 - queries = [ - { - data_source = "rum" - name = "Marshall McClure" - query = "...my_query..." - }, - ] - query = "...my_query..." - site = "us5.datadoghq.com" - source_type = "datadog" - start_date = "2022-10-01T00:00:00Z" - } - name = "Wilbert Bashirian" - secret_id = "...my_secret_id..." - workspace_id = "dce10873-e42b-4006-9678-878ba8581a58" -} -``` - - -## Schema - -### Required - -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) -- `name` (String) -- `workspace_id` (String) - -### Optional - -- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow. - -### Read-Only - -- `source_id` (String) -- `source_type` (String) - - -### Nested Schema for `configuration` - -Required: - -- `api_key` (String) Datadog API key -- `application_key` (String) Datadog application key -- `source_type` (String) must be one of ["datadog"] - -Optional: - -- `end_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. -- `max_records_per_request` (Number) Maximum number of records to collect per request. -- `queries` (Attributes List) List of queries to be run and used as inputs. (see [below for nested schema](#nestedatt--configuration--queries)) -- `query` (String) The search query. 
This just applies to Incremental syncs. If empty, it'll collect all logs. -- `site` (String) must be one of ["datadoghq.com", "us3.datadoghq.com", "us5.datadoghq.com", "datadoghq.eu", "ddog-gov.com"] -The site where Datadog data resides in. -- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. - - -### Nested Schema for `configuration.queries` - -Required: - -- `data_source` (String) must be one of ["metrics", "cloud_cost", "logs", "rum"] -A data source that is powered by the platform. -- `name` (String) The variable name for use in queries. -- `query` (String) A classic query string. - - diff --git a/docs/resources/source_datascope.md b/docs/resources/source_datascope.md index 79d6f31fd..bae217d54 100644 --- a/docs/resources/source_datascope.md +++ b/docs/resources/source_datascope.md @@ -19,9 +19,9 @@ resource "airbyte_source_datascope" "my_source_datascope" { source_type = "datascope" start_date = "dd/mm/YYYY HH:MM" } - name = "Amy Lynch" + name = "Danny Bahringer" secret_id = "...my_secret_id..." - workspace_id = "4fefa9c9-5f2e-4ac5-965d-307cfee81206" + workspace_id = "fee81206-e281-43fa-8a41-c480d3f2132a" } ``` diff --git a/docs/resources/source_delighted.md b/docs/resources/source_delighted.md index db1043968..b49cbcb20 100644 --- a/docs/resources/source_delighted.md +++ b/docs/resources/source_delighted.md @@ -19,9 +19,9 @@ resource "airbyte_source_delighted" "my_source_delighted" { since = "2022-05-30 04:50:23" source_type = "delighted" } - name = "Peggy Windler" + name = "Sarah Collier" secret_id = "...my_secret_id..." - workspace_id = "a41c480d-3f21-432a-b031-02d514f4cc6f" + workspace_id = "14f4cc6f-18bf-4962-9a6a-4f77a87ee3e4" } ``` diff --git a/docs/resources/source_dixa.md b/docs/resources/source_dixa.md index c18621575..00d9fb658 100644 --- a/docs/resources/source_dixa.md +++ b/docs/resources/source_dixa.md @@ -16,13 +16,13 @@ SourceDixa Resource resource "airbyte_source_dixa" "my_source_dixa" { configuration = { api_token = "...my_api_token..." - batch_size = 1 + batch_size = 31 source_type = "dixa" start_date = "YYYY-MM-DD" } - name = "Irvin Marks" + name = "Brittany Cole" secret_id = "...my_secret_id..." - workspace_id = "1a6a4f77-a87e-4e3e-8be7-52c65b34418e" + workspace_id = "5b34418e-3bb9-41c8-9975-e0e8419d8f84" } ``` diff --git a/docs/resources/source_dockerhub.md b/docs/resources/source_dockerhub.md index 0856d0ffb..45ad7b7aa 100644 --- a/docs/resources/source_dockerhub.md +++ b/docs/resources/source_dockerhub.md @@ -18,9 +18,9 @@ resource "airbyte_source_dockerhub" "my_source_dockerhub" { docker_username = "airbyte" source_type = "dockerhub" } - name = "Kristy Renner DDS" + name = "Joe Haag" secret_id = "...my_secret_id..." - workspace_id = "8d975e0e-8419-4d8f-84f1-44f3e07edcc4" + workspace_id = "3e07edcc-4aa5-4f3c-abd9-05a972e05672" } ``` diff --git a/docs/resources/source_dremio.md b/docs/resources/source_dremio.md index c43933901..ace6581e5 100644 --- a/docs/resources/source_dremio.md +++ b/docs/resources/source_dremio.md @@ -19,9 +19,9 @@ resource "airbyte_source_dremio" "my_source_dremio" { base_url = "...my_base_url..." source_type = "dremio" } - name = "Doug Hammes" + name = "Aaron Connelly" secret_id = "...my_secret_id..." 
- workspace_id = "cabd905a-972e-4056-b282-27b2d309470b" + workspace_id = "2d309470-bf7a-44fa-87cf-535a6fae54eb" } ``` diff --git a/docs/resources/source_dynamodb.md b/docs/resources/source_dynamodb.md index f4ea79036..6de681c81 100644 --- a/docs/resources/source_dynamodb.md +++ b/docs/resources/source_dynamodb.md @@ -17,14 +17,14 @@ resource "airbyte_source_dynamodb" "my_source_dynamodb" { configuration = { access_key_id = "A012345678910EXAMPLE" endpoint = "https://{aws_dynamo_db_url}.com" - region = "us-gov-east-1" + region = "us-gov-west-1" reserved_attribute_names = "name, field_name, field-name" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" source_type = "dynamodb" } - name = "Monique Hackett" + name = "Sandra Rowe Sr." secret_id = "...my_secret_id..." - workspace_id = "87cf535a-6fae-454e-bf60-c321f023b75d" + workspace_id = "f023b75d-2367-4fe1-a0cc-8df79f0a396d" } ``` diff --git a/docs/resources/source_e2e_test_cloud.md b/docs/resources/source_e2e_test_cloud.md index d3b6e7bb4..f8fb3fe22 100644 --- a/docs/resources/source_e2e_test_cloud.md +++ b/docs/resources/source_e2e_test_cloud.md @@ -15,8 +15,8 @@ SourceE2eTestCloud Resource ```terraform resource "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" { configuration = { - max_messages = 2 - message_interval_ms = 2 + max_messages = 6 + message_interval_ms = 0 mock_catalog = { source_e2e_test_cloud_mock_catalog_multi_schema = { stream_schemas = "...my_stream_schemas..." @@ -27,9 +27,9 @@ resource "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" { source_type = "e2e-test-cloud" type = "CONTINUOUS_FEED" } - name = "Miss Johanna Ward DDS" + name = "Gertrude Grant" secret_id = "...my_secret_id..." - workspace_id = "c8df79f0-a396-4d90-8364-b7c15dfbace1" + workspace_id = "c15dfbac-e188-4b1c-8ee2-c8c6ce611fee" } ``` diff --git a/docs/resources/source_emailoctopus.md b/docs/resources/source_emailoctopus.md index 753c0ee5f..43ef90e3a 100644 --- a/docs/resources/source_emailoctopus.md +++ b/docs/resources/source_emailoctopus.md @@ -18,9 +18,9 @@ resource "airbyte_source_emailoctopus" "my_source_emailoctopus" { api_key = "...my_api_key..." source_type = "emailoctopus" } - name = "Miss Nelson Robel" + name = "Gregory Satterfield" secret_id = "...my_secret_id..." - workspace_id = "ee2c8c6c-e611-4fee-b1c7-cbdb6eec7437" + workspace_id = "bdb6eec7-4378-4ba2-9317-747dc915ad2c" } ``` diff --git a/docs/resources/source_exchange_rates.md b/docs/resources/source_exchange_rates.md index d63fd619e..ca8d8a6e0 100644 --- a/docs/resources/source_exchange_rates.md +++ b/docs/resources/source_exchange_rates.md @@ -17,13 +17,13 @@ resource "airbyte_source_exchange_rates" "my_source_exchangerates" { configuration = { access_key = "...my_access_key..." base = "USD" - ignore_weekends = true + ignore_weekends = false source_type = "exchange-rates" start_date = "YYYY-MM-DD" } - name = "Annie Breitenberg" + name = "Mrs. Leslie Klocko" secret_id = "...my_secret_id..." - workspace_id = "47dc915a-d2ca-4f5d-9672-3dc0f5ae2f3a" + workspace_id = "c0f5ae2f-3a6b-4700-8787-56143f5a6c98" } ``` diff --git a/docs/resources/source_facebook_marketing.md b/docs/resources/source_facebook_marketing.md index 483e4e524..f34d17319 100644 --- a/docs/resources/source_facebook_marketing.md +++ b/docs/resources/source_facebook_marketing.md @@ -17,41 +17,41 @@ resource "airbyte_source_facebook_marketing" "my_source_facebookmarketing" { configuration = { access_token = "...my_access_token..." 
account_id = "111111111111111" - action_breakdowns_allow_empty = false + action_breakdowns_allow_empty = true client_id = "...my_client_id..." client_secret = "...my_client_secret..." custom_insights = [ { action_breakdowns = [ - "action_target_id", + "action_destination", ] - action_report_time = "impression" + action_report_time = "conversion" breakdowns = [ - "age", + "frequency_value", ] end_date = "2017-01-26T00:00:00Z" fields = [ - "estimated_ad_recall_rate_lower_bound", + "account_name", ] insights_lookback_window = 6 - level = "adset" - name = "Vera Bernhard" + level = "ad" + name = "Jesus Batz" start_date = "2017-01-25T00:00:00Z" - time_increment = 7 + time_increment = 8 }, ] end_date = "2017-01-26T00:00:00Z" fetch_thumbnail_images = false - include_deleted = false - insights_lookback_window = 7 - max_batch_size = 4 + include_deleted = true + insights_lookback_window = 4 + max_batch_size = 7 page_size = 3 source_type = "facebook-marketing" start_date = "2017-01-25T00:00:00Z" } - name = "Dr. Dorothy Lockman" + name = "Ms. Wilbert McGlynn" secret_id = "...my_secret_id..." - workspace_id = "0bcacc6c-bd6b-45f3-ac90-9304f926bad2" + workspace_id = "04f926ba-d255-4381-9b47-4b0ed20e5624" } ``` diff --git a/docs/resources/source_facebook_pages.md b/docs/resources/source_facebook_pages.md index 710d95208..92ca82db3 100644 --- a/docs/resources/source_facebook_pages.md +++ b/docs/resources/source_facebook_pages.md @@ -19,9 +19,9 @@ resource "airbyte_source_facebook_pages" "my_source_facebookpages" { page_id = "...my_page_id..." source_type = "facebook-pages" } - name = "Bernice Donnelly V" + name = "Moses Wuckert" secret_id = "...my_secret_id..." - workspace_id = "b474b0ed-20e5-4624-8fff-639a910abdca" + workspace_id = "39a910ab-dcab-4626-b669-6e1ec00221b3" } ``` diff --git a/docs/resources/source_faker.md b/docs/resources/source_faker.md index 38a289045..5fa327f17 100644 --- a/docs/resources/source_faker.md +++ b/docs/resources/source_faker.md @@ -16,15 +16,15 @@ SourceFaker Resource resource "airbyte_source_faker" "my_source_faker" { configuration = { always_updated = false - count = 4 - parallelism = 1 - records_per_slice = 4 - seed = 4 + count = 3 + parallelism = 9 + records_per_slice = 5 + seed = 6 source_type = "faker" } - name = "Beth McKenzie" + name = "Delbert Reynolds" secret_id = "...my_secret_id..." - workspace_id = "1ec00221-b335-4d89-acb3-ecfda8d0c549" + workspace_id = "cfda8d0c-549e-4f03-8049-78a61fa1cf20" } ``` diff --git a/docs/resources/source_fauna.md b/docs/resources/source_fauna.md index 0540b0344..e08ebd353 100644 --- a/docs/resources/source_fauna.md +++ b/docs/resources/source_fauna.md @@ -21,17 +21,17 @@ resource "airbyte_source_fauna" "my_source_fauna" { deletion_mode = "ignore" } } - page_size = 9 + page_size = 4 } domain = "...my_domain..." - port = 10 + port = 5 scheme = "...my_scheme..." secret = "...my_secret..." source_type = "fauna" } - name = "Mrs. Edna Abbott" + name = "Irvin Klein" secret_id = "...my_secret_id..." - workspace_id = "78a61fa1-cf20-4688-b77c-1ffc71dca163" + workspace_id = "1ffc71dc-a163-4f2a-bc80-a97ff334cddf" } ``` diff --git a/docs/resources/source_file_secure.md b/docs/resources/source_file_secure.md index eeb7f088b..79d6ae22a 100644 --- a/docs/resources/source_file_secure.md +++ b/docs/resources/source_file_secure.md @@ -16,7 +16,7 @@ SourceFileSecure Resource resource "airbyte_source_file_secure" "my_source_filesecure" { configuration = { dataset_name = "...my_dataset_name..." 
- format = "yaml" + format = "excel_binary" provider = { source_file_secure_storage_provider_az_blob_azure_blob_storage = { sas_token = "...my_sas_token..." @@ -25,13 +25,13 @@ resource "airbyte_source_file_secure" "my_source_filesecure" { storage_account = "...my_storage_account..." } } - reader_options = "{}" + reader_options = "{\"sep\": \" \"}" source_type = "file-secure" url = "gs://my-google-bucket/data.csv" } - name = "Miss Sheri Legros" + name = "Freddie Von V" secret_id = "...my_secret_id..." - workspace_id = "7ff334cd-df85-47a9-a618-76c6ab21d29d" + workspace_id = "76c6ab21-d29d-4fc9-8d6f-ecd799390066" } ``` diff --git a/docs/resources/source_firebolt.md b/docs/resources/source_firebolt.md index f2985708b..6d39095c2 100644 --- a/docs/resources/source_firebolt.md +++ b/docs/resources/source_firebolt.md @@ -23,9 +23,9 @@ resource "airbyte_source_firebolt" "my_source_firebolt" { source_type = "firebolt" username = "username@email.com" } - name = "Hector Yundt" + name = "Donna Abshire" secret_id = "...my_secret_id..." - workspace_id = "d7993900-66a6-4d2d-8003-55338cec086f" + workspace_id = "5338cec0-86fa-421e-9152-cb3119167b8e" } ``` diff --git a/docs/resources/source_freshcaller.md b/docs/resources/source_freshcaller.md index 28bc2c2c7..5201870a0 100644 --- a/docs/resources/source_freshcaller.md +++ b/docs/resources/source_freshcaller.md @@ -17,14 +17,14 @@ resource "airbyte_source_freshcaller" "my_source_freshcaller" { configuration = { api_key = "...my_api_key..." domain = "snaptravel" - requests_per_minute = 7 + requests_per_minute = 2 source_type = "freshcaller" start_date = "2022-01-01T12:00:00Z" sync_lag_minutes = 9 } - name = "Keith Hills" + name = "Kenneth Friesen IV" secret_id = "...my_secret_id..." - workspace_id = "b3119167-b8e3-4c8d-b034-08d6d364ffd4" + workspace_id = "d6d364ff-d455-4906-9126-3d48e935c2c9" } ``` diff --git a/docs/resources/source_freshdesk.md b/docs/resources/source_freshdesk.md index 5e2fed8d6..ac7a22698 100644 --- a/docs/resources/source_freshdesk.md +++ b/docs/resources/source_freshdesk.md @@ -17,13 +17,13 @@ resource "airbyte_source_freshdesk" "my_source_freshdesk" { configuration = { api_key = "...my_api_key..." domain = "myaccount.freshdesk.com" - requests_per_minute = 3 + requests_per_minute = 10 source_type = "freshdesk" start_date = "2020-12-01T00:00:00Z" } - name = "Mr. Tara Sporer" + name = "Dale Altenwerth" secret_id = "...my_secret_id..." - workspace_id = "3d48e935-c2c9-4e81-b30b-e3e43202d721" + workspace_id = "3e43202d-7216-4576-9066-41870d9d21f9" } ``` diff --git a/docs/resources/source_freshsales.md b/docs/resources/source_freshsales.md index f5db32933..01138f18f 100644 --- a/docs/resources/source_freshsales.md +++ b/docs/resources/source_freshsales.md @@ -19,9 +19,9 @@ resource "airbyte_source_freshsales" "my_source_freshsales" { domain_name = "mydomain.myfreshworks.com" source_type = "freshsales" } - name = "Geraldine Kling" + name = "Gustavo Adams DDS" secret_id = "...my_secret_id..." - workspace_id = "06641870-d9d2-41f9-ad03-0c4ecc11a083" + workspace_id = "4ecc11a0-8364-4290-a8b8-502a55e7f73b" } ``` diff --git a/docs/resources/source_gainsight_px.md b/docs/resources/source_gainsight_px.md index 49dd07442..b61fefa57 100644 --- a/docs/resources/source_gainsight_px.md +++ b/docs/resources/source_gainsight_px.md @@ -18,9 +18,9 @@ resource "airbyte_source_gainsight_px" "my_source_gainsightpx" { api_key = "...my_api_key..." source_type = "gainsight-px" } - name = "Anita Dare III" + name = "Hugh Goodwin" secret_id = "...my_secret_id..." 
- workspace_id = "8b8502a5-5e7f-473b-8845-e320a319f4ba" + workspace_id = "320a319f-4bad-4f94-bc9a-867bc4242666" } ``` diff --git a/docs/resources/source_gcs.md b/docs/resources/source_gcs.md index ad947ac69..36995e4c2 100644 --- a/docs/resources/source_gcs.md +++ b/docs/resources/source_gcs.md @@ -20,9 +20,9 @@ resource "airbyte_source_gcs" "my_source_gcs" { service_account = "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID, \"private_key_id\": YOUR_PRIVATE_KEY, ... }" source_type = "gcs" } - name = "Darin Mante" + name = "Olga Blanda" secret_id = "...my_secret_id..." - workspace_id = "c9a867bc-4242-4666-9816-ddca8ef51fcb" + workspace_id = "dca8ef51-fcb4-4c59-bec1-2cdaad0ec7af" } ``` diff --git a/docs/resources/source_getlago.md b/docs/resources/source_getlago.md index 8be7811c9..ffb27eb2c 100644 --- a/docs/resources/source_getlago.md +++ b/docs/resources/source_getlago.md @@ -18,9 +18,9 @@ resource "airbyte_source_getlago" "my_source_getlago" { api_key = "...my_api_key..." source_type = "getlago" } - name = "Myra Hills" + name = "Irving Rohan" secret_id = "...my_secret_id..." - workspace_id = "ec12cdaa-d0ec-47af-adbd-80df448a47f9" + workspace_id = "0df448a4-7f93-490c-9888-0983dabf9ef3" } ``` diff --git a/docs/resources/source_github.md b/docs/resources/source_github.md index 4989b76ce..fe3191ddf 100644 --- a/docs/resources/source_github.md +++ b/docs/resources/source_github.md @@ -24,14 +24,14 @@ resource "airbyte_source_github" "my_source_github" { option_title = "OAuth Credentials" } } - repository = "airbytehq/airbyte airbytehq/another-repo" - requests_per_hour = 6 + repository = "airbytehq/airbyte" + requests_per_hour = 10 source_type = "github" start_date = "2021-03-01T00:00:00Z" } - name = "Brandy Kuvalis V" + name = "Van Kuhlman IV" secret_id = "...my_secret_id..." - workspace_id = "83dabf9e-f3ff-4dd9-b7f0-79af4d35724c" + workspace_id = "9af4d357-24cd-4b0f-8d28-1187d56844ed" } ``` diff --git a/docs/resources/source_gitlab.md b/docs/resources/source_gitlab.md index db961ccf4..169076e81 100644 --- a/docs/resources/source_gitlab.md +++ b/docs/resources/source_gitlab.md @@ -23,7 +23,7 @@ resource "airbyte_source_gitlab" "my_source_gitlab" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2022-11-19T07:42:45.478Z" + token_expiry_date = "2021-06-26T03:36:42.239Z" } } groups = "airbyte.io" @@ -31,9 +31,9 @@ resource "airbyte_source_gitlab" "my_source_gitlab" { source_type = "gitlab" start_date = "2021-03-01T00:00:00Z" } - name = "Mr. Jesse Luettgen" + name = "Frank Keeling" secret_id = "...my_secret_id..." - workspace_id = "7d56844e-ded8-45a9-865e-628bdfc2032b" + workspace_id = "628bdfc2-032b-46c8-b992-3b7e13584f7a" } ``` diff --git a/docs/resources/source_glassfrog.md b/docs/resources/source_glassfrog.md index fde93c053..9d864489f 100644 --- a/docs/resources/source_glassfrog.md +++ b/docs/resources/source_glassfrog.md @@ -18,9 +18,9 @@ resource "airbyte_source_glassfrog" "my_source_glassfrog" { api_key = "...my_api_key..." source_type = "glassfrog" } - name = "Angelica Langworth" + name = "Carl Davis" secret_id = "...my_secret_id..." 
- workspace_id = "923b7e13-584f-47ae-92c6-891f82ce1157" + workspace_id = "891f82ce-1157-4172-b053-77dcfa89df97" } ``` diff --git a/docs/resources/source_gnews.md b/docs/resources/source_gnews.md index e50be2bec..43bff1408 100644 --- a/docs/resources/source_gnews.md +++ b/docs/resources/source_gnews.md @@ -16,25 +16,25 @@ SourceGnews Resource resource "airbyte_source_gnews" "my_source_gnews" { configuration = { api_key = "...my_api_key..." - country = "ca" + country = "ie" end_date = "2022-08-21 16:27:09" in = [ - "description", + "content", ] - language = "en" + language = "fr" nullable = [ - "title", + "description", ] - query = "Microsoft Windows 10" + query = "Apple AND NOT iPhone" sortby = "publishedAt" source_type = "gnews" start_date = "2022-08-21 16:27:09" - top_headlines_query = "Apple OR Microsoft" - top_headlines_topic = "technology" + top_headlines_query = "Apple AND NOT iPhone" + top_headlines_topic = "business" } - name = "Mercedes Schiller" + name = "Katrina Considine" secret_id = "...my_secret_id..." - workspace_id = "89df975e-3566-4860-92e9-c3ddc5f111de" + workspace_id = "c3ddc5f1-11de-4a10-a6d5-41a4d190feb2" } ``` diff --git a/docs/resources/source_google_ads.md b/docs/resources/source_google_ads.md index 95b5d762f..0d20ad3dc 100644 --- a/docs/resources/source_google_ads.md +++ b/docs/resources/source_google_ads.md @@ -35,9 +35,9 @@ resource "airbyte_source_google_ads" "my_source_googleads" { source_type = "google-ads" start_date = "2017-01-25" } - name = "Miss Lloyd Funk" + name = "Dr. Forrest Roob" secret_id = "...my_secret_id..." - workspace_id = "d190feb2-1780-4bcc-80db-bddb484708fb" + workspace_id = "bddb4847-08fb-44e3-91e6-bc158c4c4e54" } ``` @@ -67,14 +67,14 @@ Required: - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) - `customer_id` (String) Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation. - `source_type` (String) must be one of ["google-ads"] -- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. Optional: - `conversion_window_days` (Number) A conversion window is the number of days after an ad interaction (such as an ad click or video view) during which a conversion, such as a purchase, is recorded in Google Ads. For more information, see Google's documentation. - `custom_queries` (Attributes List) (see [below for nested schema](#nestedatt--configuration--custom_queries)) -- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. +- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set) - `login_customer_id` (String) If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation. +- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
(Default value of two years ago is used if not set) ### Nested Schema for `configuration.credentials` diff --git a/docs/resources/source_google_analytics_data_api.md b/docs/resources/source_google_analytics_data_api.md index 5428f6475..cbd825a38 100644 --- a/docs/resources/source_google_analytics_data_api.md +++ b/docs/resources/source_google_analytics_data_api.md @@ -26,13 +26,13 @@ resource "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsda } custom_reports = "...my_custom_reports..." date_ranges_start_date = "2021-01-01" - property_id = "1738294" + property_id = "5729978930" source_type = "google-analytics-data-api" - window_in_days = 120 + window_in_days = 364 } - name = "Duane Prohaska II" + name = "Juanita Collier" secret_id = "...my_secret_id..." - workspace_id = "8c4c4e54-599e-4a34-a260-e9b200ce78a1" + workspace_id = "0e9b200c-e78a-41bd-8fb7-a0a116ce723d" } ``` diff --git a/docs/resources/source_google_analytics_v4.md b/docs/resources/source_google_analytics_v4.md index 3980164bd..1a2d2ab2a 100644 --- a/docs/resources/source_google_analytics_v4.md +++ b/docs/resources/source_google_analytics_v4.md @@ -30,9 +30,9 @@ resource "airbyte_source_google_analytics_v4" "my_source_googleanalyticsv4" { view_id = "...my_view_id..." window_in_days = 120 } - name = "Miss Harvey Nicolas Sr." + name = "Dr. Doug Dibbert" secret_id = "...my_secret_id..." - workspace_id = "6ce723d4-097f-4a30-a9af-725b29122030" + workspace_id = "af725b29-1220-430d-83f5-aeb7799d22e8" } ``` diff --git a/docs/resources/source_google_directory.md b/docs/resources/source_google_directory.md index b03ffbe06..277f8a891 100644 --- a/docs/resources/source_google_directory.md +++ b/docs/resources/source_google_directory.md @@ -19,14 +19,14 @@ resource "airbyte_source_google_directory" "my_source_googledirectory" { source_google_directory_google_credentials_service_account_key = { credentials_json = "...my_credentials_json..." credentials_title = "Service accounts" - email = "Juvenal.Frami64@hotmail.com" + email = "Ayla.Zulauf@hotmail.com" } } source_type = "google-directory" } - name = "Kelvin Kshlerin" + name = "Mrs. Allen Lockman" secret_id = "...my_secret_id..." - workspace_id = "9d22e8c1-f849-4382-9fdc-42c876c2c2df" + workspace_id = "dc42c876-c2c2-4dfb-8cfc-1c76230f841f" } ``` diff --git a/docs/resources/source_google_pagespeed_insights.md b/docs/resources/source_google_pagespeed_insights.md index 54811931c..c574bdc92 100644 --- a/docs/resources/source_google_pagespeed_insights.md +++ b/docs/resources/source_google_pagespeed_insights.md @@ -27,9 +27,9 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin "...", ] } - name = "Miss Terrell Satterfield" + name = "Kristopher Dare" secret_id = "...my_secret_id..." - workspace_id = "6230f841-fb1b-4d23-bdb1-4db6be5a6859" + workspace_id = "db14db6b-e5a6-4859-98e2-2ae20da16fc2" } ``` diff --git a/docs/resources/source_google_search_console.md b/docs/resources/source_google_search_console.md index f63cd4164..5d721b254 100644 --- a/docs/resources/source_google_search_console.md +++ b/docs/resources/source_google_search_console.md @@ -25,17 +25,25 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole" } } custom_reports = "...my_custom_reports..." 
- data_state = "final" - end_date = "2021-12-12" + custom_reports_array = [ + { + dimensions = [ + "page", + ] + name = "Heidi Bernier" + }, + ] + data_state = "all" + end_date = "2021-12-12" site_urls = [ "...", ] source_type = "google-search-console" - start_date = "2021-01-01" + start_date = "2022-07-11" } - name = "Janie Swift PhD" + name = "Jordan Hilll" secret_id = "...my_secret_id..." - workspace_id = "a16fc2b2-71a2-489c-97e8-54e90439d222" + workspace_id = "90439d22-2465-4694-a240-7084f7ab37ce" } ``` @@ -63,16 +71,17 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole" Required: - `authorization` (Attributes) (see [below for nested schema](#nestedatt--configuration--authorization)) -- `site_urls` (List of String) The URLs of the website property attached to your GSC account. Read more here. +- `site_urls` (List of String) The URLs of the website property attached to your GSC account. Learn more about properties here. - `source_type` (String) must be one of ["google-search-console"] -- `start_date` (String) UTC date in the format 2017-01-25. Any data before this date will not be replicated. Optional: -- `custom_reports` (String) A JSON array describing the custom reports you want to sync from Google Search Console. See the docs for more information about the exact format you can use to fill out this field. +- `custom_reports` (String) (DEPRECATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports. +- `custom_reports_array` (Attributes List) You can add your Custom Analytics report by creating one. (see [below for nested schema](#nestedatt--configuration--custom_reports_array)) - `data_state` (String) must be one of ["final", "all"] -If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. -- `end_date` (String) UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field. +If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. +- `end_date` (String) UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. +- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.
+ + +### Nested Schema for `configuration.custom_reports_array` + +Required: + +- `dimensions` (List of String) A list of dimensions (country, date, device, page, query). +- `name` (String) The name of the custom report; this name will be used as the stream name. + + diff --git a/docs/resources/source_google_sheets.md b/docs/resources/source_google_sheets.md index 3aae039b4..3ba1744db 100644 --- a/docs/resources/source_google_sheets.md +++ b/docs/resources/source_google_sheets.md @@ -24,13 +24,12 @@ resource "airbyte_source_google_sheets" "my_source_googlesheets" { } } names_conversion = true - row_batch_size = 100 source_type = "google-sheets" spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" } - name = "Mae Gleichner" + name = "Irene Davis" secret_id = "...my_secret_id..." - workspace_id = "407084f7-ab37-4cef-8222-5194db55410a" + workspace_id = "194db554-10ad-4c66-9af9-0a26c7cdc981" } ``` @@ -64,7 +63,6 @@ Required: Optional: - `names_conversion` (Boolean) Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based. -- `row_batch_size` (Number) The number of rows fetched when making a Google Sheet API call. Defaults to 200. ### Nested Schema for `configuration.credentials` diff --git a/docs/resources/source_google_webfonts.md b/docs/resources/source_google_webfonts.md index 52e4bac68..002d3ab6f 100644 --- a/docs/resources/source_google_webfonts.md +++ b/docs/resources/source_google_webfonts.md @@ -21,9 +21,9 @@ resource "airbyte_source_google_webfonts" "my_source_googlewebfonts" { sort = "...my_sort..." source_type = "google-webfonts" } - name = "Garrett Hoeger" + name = "Donald Hyatt" secret_id = "...my_secret_id..."
- workspace_id = "c8ca12d0-2529-4270-b8d5-722dd895b8bc" + workspace_id = "5722dd89-5b8b-4cf2-8db9-59693352f745" } ``` diff --git a/docs/resources/source_gridly.md b/docs/resources/source_gridly.md index 91f9d1869..01d5a0080 100644 --- a/docs/resources/source_gridly.md +++ b/docs/resources/source_gridly.md @@ -19,9 +19,9 @@ resource "airbyte_source_gridly" "my_source_gridly" { grid_id = "...my_grid_id..." source_type = "gridly" } - name = "Ernest Grimes" + name = "Josephine McCullough" secret_id = "...my_secret_id..." - workspace_id = "95969335-2f74-4533-994d-78de3b6e9389" + workspace_id = "d78de3b6-e938-49f5-abb7-f662550a2838" } ``` diff --git a/docs/resources/source_harvest.md b/docs/resources/source_harvest.md index e24d8f247..1eebc2ea3 100644 --- a/docs/resources/source_harvest.md +++ b/docs/resources/source_harvest.md @@ -28,9 +28,9 @@ resource "airbyte_source_harvest" "my_source_harvest" { replication_start_date = "2017-01-25T00:00:00Z" source_type = "harvest" } - name = "Harvey Wisoky" + name = "Rodney Orn" secret_id = "...my_secret_id..." - workspace_id = "2550a283-82ac-4483-afd2-315bba650164" + workspace_id = "2315bba6-5016-44e0-af5b-f6ae591bc8bd" } ``` diff --git a/docs/resources/source_hubplanner.md b/docs/resources/source_hubplanner.md index c8ed20afc..f20025573 100644 --- a/docs/resources/source_hubplanner.md +++ b/docs/resources/source_hubplanner.md @@ -18,9 +18,9 @@ resource "airbyte_source_hubplanner" "my_source_hubplanner" { api_key = "...my_api_key..." source_type = "hubplanner" } - name = "Scott Jast" + name = "Cary Emmerich Sr." secret_id = "...my_secret_id..." - workspace_id = "bf6ae591-bc8b-4def-b612-b63c205fda84" + workspace_id = "b63c205f-da84-4077-8a68-a9a35d086b6f" } ``` diff --git a/docs/resources/source_hubspot.md b/docs/resources/source_hubspot.md index 01f145b42..a6f8e891a 100644 --- a/docs/resources/source_hubspot.md +++ b/docs/resources/source_hubspot.md @@ -26,9 +26,9 @@ resource "airbyte_source_hubspot" "my_source_hubspot" { source_type = "hubspot" start_date = "2017-01-25T00:00:00Z" } - name = "Pauline Paucek" + name = "Mr. Tomas Wisozk DVM" secret_id = "...my_secret_id..." - workspace_id = "a9a35d08-6b6f-466f-af02-0e9f443b4257" + workspace_id = "9f443b42-57b9-492c-8dbd-a6a61efa2198" } ``` diff --git a/docs/resources/source_insightly.md b/docs/resources/source_insightly.md index e56efd368..576daa4ed 100644 --- a/docs/resources/source_insightly.md +++ b/docs/resources/source_insightly.md @@ -19,9 +19,9 @@ resource "airbyte_source_insightly" "my_source_insightly" { start_date = "2021-03-01T00:00:00Z" token = "...my_token..." } - name = "Arturo Marks" + name = "Dana Lindgren" secret_id = "...my_secret_id..." - workspace_id = "8dbda6a6-1efa-4219-8258-fd0a9eba47f7" + workspace_id = "0a9eba47-f7d3-4ef0-8964-0d6a1831c87a" } ``` diff --git a/docs/resources/source_instagram.md b/docs/resources/source_instagram.md index f5743326b..926ad00ad 100644 --- a/docs/resources/source_instagram.md +++ b/docs/resources/source_instagram.md @@ -21,9 +21,9 @@ resource "airbyte_source_instagram" "my_source_instagram" { source_type = "instagram" start_date = "2017-01-25T00:00:00Z" } - name = "Randal Aufderhar" + name = "Mae Hoppe" secret_id = "...my_secret_id..." 
- workspace_id = "640d6a18-31c8-47ad-b596-fdf1ad837ae8" + workspace_id = "f1ad837a-e80c-41c1-9c95-ba998678fa3f" } ``` diff --git a/docs/resources/source_instatus.md b/docs/resources/source_instatus.md index dac0e70bf..830c152ab 100644 --- a/docs/resources/source_instatus.md +++ b/docs/resources/source_instatus.md @@ -18,9 +18,9 @@ resource "airbyte_source_instatus" "my_source_instatus" { api_key = "...my_api_key..." source_type = "instatus" } - name = "Rosalie Bruen V" + name = "Bobbie Johnston" secret_id = "...my_secret_id..." - workspace_id = "c95ba998-678f-4a3f-a969-91af388ce036" + workspace_id = "1af388ce-0361-4444-8c79-77a0ef2f5360" } ``` diff --git a/docs/resources/source_intercom.md b/docs/resources/source_intercom.md index adbf544c9..d2ad37413 100644 --- a/docs/resources/source_intercom.md +++ b/docs/resources/source_intercom.md @@ -15,13 +15,15 @@ SourceIntercom Resource ```terraform resource "airbyte_source_intercom" "my_source_intercom" { configuration = { - access_token = "...my_access_token..." - source_type = "intercom" - start_date = "2020-11-16T00:00:00Z" + access_token = "...my_access_token..." + client_id = "...my_client_id..." + client_secret = "...my_client_secret..." + source_type = "intercom" + start_date = "2020-11-16T00:00:00Z" } - name = "Emma Lueilwitz" + name = "Darnell Watsica" secret_id = "...my_secret_id..." - workspace_id = "977a0ef2-f536-4028-afee-f934152ed7e2" + workspace_id = "934152ed-7e25-43f4-8157-deaa7170f445" } ``` @@ -52,4 +54,9 @@ Required: - `source_type` (String) must be one of ["intercom"] - `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. +Optional: + +- `client_id` (String) Client Id for your Intercom application. +- `client_secret` (String) Client Secret for your Intercom application. + diff --git a/docs/resources/source_ip2whois.md b/docs/resources/source_ip2whois.md index 70993bbbc..623f30ce1 100644 --- a/docs/resources/source_ip2whois.md +++ b/docs/resources/source_ip2whois.md @@ -16,12 +16,12 @@ SourceIp2whois Resource resource "airbyte_source_ip2whois" "my_source_ip2whois" { configuration = { api_key = "...my_api_key..." - domain = "www.google.com" + domain = "www.facebook.com" source_type = "ip2whois" } - name = "Lucia Gorczany II" + name = "Leland Wisoky" secret_id = "...my_secret_id..." - workspace_id = "7deaa717-0f44-45ac-8f66-7aaf9bbad185" + workspace_id = "7aaf9bba-d185-4fe4-b1d6-bf5c838fbb8c" } ``` diff --git a/docs/resources/source_iterable.md b/docs/resources/source_iterable.md index f3b02a168..4c6f49f35 100644 --- a/docs/resources/source_iterable.md +++ b/docs/resources/source_iterable.md @@ -19,9 +19,9 @@ resource "airbyte_source_iterable" "my_source_iterable" { source_type = "iterable" start_date = "2021-04-01T00:00:00Z" } - name = "Peggy Bergstrom" + name = "Archie Jaskolski" secret_id = "...my_secret_id..." - workspace_id = "bf5c838f-bb8c-420c-b67f-c4b425e99e62" + workspace_id = "c4b425e9-9e62-434c-9f7b-79dfeb77a5c3" } ``` diff --git a/docs/resources/source_jira.md b/docs/resources/source_jira.md index 030f09050..cccfae197 100644 --- a/docs/resources/source_jira.md +++ b/docs/resources/source_jira.md @@ -16,10 +16,10 @@ SourceJira Resource resource "airbyte_source_jira" "my_source_jira" { configuration = { api_token = "...my_api_token..." 
- domain = ".atlassian.net" - email = "Ottilie.McCullough73@gmail.com" + domain = ".jira.com" + email = "Eldridge_Reichert@hotmail.com" enable_experimental_streams = false - expand_issue_changelog = true + expand_issue_changelog = false projects = [ "...", ] @@ -27,9 +27,9 @@ resource "airbyte_source_jira" "my_source_jira" { source_type = "jira" start_date = "2021-03-01T00:00:00Z" } - name = "Mathew Klocko" + name = "Olive Windler" secret_id = "...my_secret_id..." - workspace_id = "c38d4baf-91e5-406e-b890-a54b475f16f5" + workspace_id = "0a54b475-f16f-456d-b85a-3c4ac631b99e" } ``` diff --git a/docs/resources/source_k6_cloud.md b/docs/resources/source_k6_cloud.md index 1833d4189..7259f593b 100644 --- a/docs/resources/source_k6_cloud.md +++ b/docs/resources/source_k6_cloud.md @@ -18,9 +18,9 @@ resource "airbyte_source_k6_cloud" "my_source_k6cloud" { api_token = "...my_api_token..." source_type = "k6-cloud" } - name = "Marcella Dooley" + name = "Ella Runolfsdottir" secret_id = "...my_secret_id..." - workspace_id = "a3c4ac63-1b99-4e26-8ed8-f9fdb9410f63" + workspace_id = "8f9fdb94-10f6-43bb-b817-837b01afdd78" } ``` diff --git a/docs/resources/source_klarna.md b/docs/resources/source_klarna.md index aefd5eed9..3ecc96a20 100644 --- a/docs/resources/source_klarna.md +++ b/docs/resources/source_klarna.md @@ -16,14 +16,14 @@ SourceKlarna Resource resource "airbyte_source_klarna" "my_source_klarna" { configuration = { password = "...my_password..." - playground = false - region = "oc" + playground = true + region = "us" source_type = "klarna" - username = "Tristin47" + username = "Chase50" } - name = "Jacob Krajcik Sr." + name = "Caleb Rau" secret_id = "...my_secret_id..." - workspace_id = "afdd7886-2418-49eb-8487-3f5033f19dbf" + workspace_id = "873f5033-f19d-4bf1-a5ce-4152eab9cd7e" } ``` diff --git a/docs/resources/source_klaviyo.md b/docs/resources/source_klaviyo.md index 8967d5791..34b618c3f 100644 --- a/docs/resources/source_klaviyo.md +++ b/docs/resources/source_klaviyo.md @@ -19,9 +19,9 @@ resource "airbyte_source_klaviyo" "my_source_klaviyo" { source_type = "klaviyo" start_date = "2017-01-25T00:00:00Z" } - name = "Della Trantow II" + name = "Charlotte Muller" secret_id = "...my_secret_id..." - workspace_id = "2eab9cd7-e522-44a6-a0e1-23b7847ec59e" + workspace_id = "0e123b78-47ec-459e-9f67-f3c4cce4b6d7" } ``` diff --git a/docs/resources/source_kustomer_singer.md b/docs/resources/source_kustomer_singer.md index 46d43c953..1fdcb97e2 100644 --- a/docs/resources/source_kustomer_singer.md +++ b/docs/resources/source_kustomer_singer.md @@ -19,9 +19,9 @@ resource "airbyte_source_kustomer_singer" "my_source_kustomersinger" { source_type = "kustomer-singer" start_date = "2019-01-01T00:00:00Z" } - name = "Camille Johnston" + name = "Bobbie Jacobs" secret_id = "...my_secret_id..." 
- workspace_id = "3c4cce4b-6d76-496f-b3c5-747501357e44" + workspace_id = "3c574750-1357-4e44-b51f-8b084c3197e1" } ``` diff --git a/docs/resources/source_kyve.md b/docs/resources/source_kyve.md index 446f38c52..2ce36824e 100644 --- a/docs/resources/source_kyve.md +++ b/docs/resources/source_kyve.md @@ -15,16 +15,16 @@ SourceKyve Resource ```terraform resource "airbyte_source_kyve" "my_source_kyve" { configuration = { - max_pages = 10 - page_size = 4 - pool_ids = "0" + max_pages = 6 + page_size = 2 + pool_ids = "0,1" source_type = "kyve" - start_ids = "0,0" - url_base = "https://api.beta.kyve.network/" + start_ids = "0" + url_base = "https://api.korellia.kyve.network/" } - name = "William Larson" + name = "Gail Homenick" secret_id = "...my_secret_id..." - workspace_id = "3197e193-a245-4467-b948-74c2d5cc4972" + workspace_id = "94874c2d-5cc4-4972-a33e-66bd8fe5d00b" } ``` diff --git a/docs/resources/source_launchdarkly.md b/docs/resources/source_launchdarkly.md index 2a093d0ab..aac121903 100644 --- a/docs/resources/source_launchdarkly.md +++ b/docs/resources/source_launchdarkly.md @@ -18,9 +18,9 @@ resource "airbyte_source_launchdarkly" "my_source_launchdarkly" { access_token = "...my_access_token..." source_type = "launchdarkly" } - name = "Edith Dickens" + name = "Darren Monahan" secret_id = "...my_secret_id..." - workspace_id = "6bd8fe5d-00b9-479e-b203-87320590ccc1" + workspace_id = "20387320-590c-4cc1-8964-00313b3e5044" } ``` diff --git a/docs/resources/source_lemlist.md b/docs/resources/source_lemlist.md index d843e9618..0b231b6ab 100644 --- a/docs/resources/source_lemlist.md +++ b/docs/resources/source_lemlist.md @@ -18,9 +18,9 @@ resource "airbyte_source_lemlist" "my_source_lemlist" { api_key = "...my_api_key..." source_type = "lemlist" } - name = "Erika Jaskolski Jr." + name = "Gene Herman" secret_id = "...my_secret_id..." - workspace_id = "313b3e50-44f6-45fe-b2dc-4077d0cc3f40" + workspace_id = "72dc4077-d0cc-43f4-88ef-c15ceb4d6e1e" } ``` @@ -47,7 +47,7 @@ resource "airbyte_source_lemlist" "my_source_lemlist" { Required: -- `api_key` (String) Lemlist API key. +- `api_key` (String) Lemlist API key, - `source_type` (String) must be one of ["lemlist"] diff --git a/docs/resources/source_lever_hiring.md b/docs/resources/source_lever_hiring.md index ca303549f..3e3d97ffa 100644 --- a/docs/resources/source_lever_hiring.md +++ b/docs/resources/source_lever_hiring.md @@ -25,9 +25,9 @@ resource "airbyte_source_lever_hiring" "my_source_leverhiring" { source_type = "lever-hiring" start_date = "2021-03-01T00:00:00Z" } - name = "Mrs. Amos Ryan" + name = "Donald Wuckert" secret_id = "...my_secret_id..." - workspace_id = "eb4d6e1e-ae0f-475a-adf2-acab58b991c9" + workspace_id = "aedf2aca-b58b-4991-8926-ddb589461e74" } ``` diff --git a/docs/resources/source_linkedin_ads.md b/docs/resources/source_linkedin_ads.md index cdfe6e58d..958334248 100644 --- a/docs/resources/source_linkedin_ads.md +++ b/docs/resources/source_linkedin_ads.md @@ -20,8 +20,8 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" { ] ad_analytics_reports = [ { - name = "Mable Stroman" - pivot_by = "MEMBER_COMPANY_SIZE" + name = "Kara Rohan" + pivot_by = "MEMBER_REGION_V2" time_granularity = "MONTHLY" }, ] @@ -34,9 +34,9 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" { source_type = "linkedin-ads" start_date = "2021-05-17" } - name = "Leigh Kuhic" + name = "Elsa Adams" secret_id = "...my_secret_id..." 
- workspace_id = "1cbe6d95-02f0-4ea9-b0b6-9f7ac2f72f88" + workspace_id = "930b69f7-ac2f-472f-8850-090491160820" } ``` @@ -64,11 +64,11 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" { Required: - `source_type` (String) must be one of ["linkedin-ads"] -- `start_date` (String) UTC date in the format 2020-09-17. Any data before this date will not be replicated. +- `start_date` (String) UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. Optional: -- `account_ids` (List of Number) Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info. +- `account_ids` (List of Number) Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs. - `ad_analytics_reports` (Attributes List) (see [below for nested schema](#nestedatt--configuration--ad_analytics_reports)) - `credentials` (Attributes) (see [below for nested schema](#nestedatt--configuration--credentials)) @@ -77,15 +77,11 @@ Optional: Required: -- `name` (String) The name for the report +- `name` (String) The name for the custom report. - `pivot_by` (String) must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"] -Select value from list to pivot by +Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. - `time_granularity` (String) must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"] -Set time granularity for report: -ALL - Results grouped into a single result across the entire time range of the report. -DAILY - Results grouped by day. -MONTHLY - Results grouped by month. -YEARLY - Results grouped by year. +Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. @@ -103,7 +99,7 @@ Optional: Required: -- `access_token` (String) The token value generated using the authentication code. See the docs to obtain yours. +- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information. Optional: @@ -115,9 +111,9 @@ Optional: Required: -- `client_id` (String) The client ID of the LinkedIn Ads developer application. -- `client_secret` (String) The client secret the LinkedIn Ads developer application. -- `refresh_token` (String) The key to refresh the expired access token. +- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information. +- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information. +- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information. Optional: @@ -129,7 +125,7 @@ Optional: Required: -- `access_token` (String) The token value generated using the authentication code. See the docs to obtain yours. +- `access_token` (String) The access token generated for your developer application. Refer to our documentation for more information. Optional: @@ -141,9 +137,9 @@ Optional: Required: -- `client_id` (String) The client ID of the LinkedIn Ads developer application. -- `client_secret` (String) The client secret the LinkedIn Ads developer application. -- `refresh_token` (String) The key to refresh the expired access token. +- `client_id` (String) The client ID of your developer application. Refer to our documentation for more information. +- `client_secret` (String) The client secret of your developer application. Refer to our documentation for more information. +- `refresh_token` (String) The key to refresh the expired access token. Refer to our documentation for more information. Optional: diff --git a/docs/resources/source_linkedin_pages.md b/docs/resources/source_linkedin_pages.md index ec4ffa3a7..14ab6fbf6 100644 --- a/docs/resources/source_linkedin_pages.md +++ b/docs/resources/source_linkedin_pages.md @@ -24,9 +24,9 @@ resource "airbyte_source_linkedin_pages" "my_source_linkedinpages" { org_id = "123456789" source_type = "linkedin-pages" } - name = "Karen Barrows I" + name = "Tracey Kutch" secret_id = "...my_secret_id..." - workspace_id = "91160820-7888-4ec6-a183-bfe9659eb40e" + workspace_id = "c66183bf-e965-49eb-80ec-16faf75b0b53" } ``` diff --git a/docs/resources/source_linnworks.md b/docs/resources/source_linnworks.md index 43e11f548..80be8629f 100644 --- a/docs/resources/source_linnworks.md +++ b/docs/resources/source_linnworks.md @@ -18,12 +18,12 @@ resource "airbyte_source_linnworks" "my_source_linnworks" { application_id = "...my_application_id..." application_secret = "...my_application_secret..." source_type = "linnworks" - start_date = "2022-08-21T08:36:18.969Z" + start_date = "2022-05-04T07:21:12.859Z" token = "...my_token..." } - name = "Lana Osinski" + name = "Antonia Muller" secret_id = "...my_secret_id..." 
- workspace_id = "5b0b532a-4da3-47cb-aaf4-452c4842c9b2" + workspace_id = "cbaaf445-2c48-442c-9b2a-d32dafe81a88" } ``` diff --git a/docs/resources/source_lokalise.md b/docs/resources/source_lokalise.md index 445f240e4..c887b9bc6 100644 --- a/docs/resources/source_lokalise.md +++ b/docs/resources/source_lokalise.md @@ -19,9 +19,9 @@ resource "airbyte_source_lokalise" "my_source_lokalise" { project_id = "...my_project_id..." source_type = "lokalise" } - name = "Al Emmerich" + name = "Bernard Gottlieb" secret_id = "...my_secret_id..." - workspace_id = "afe81a88-f444-4457-bfec-d47353f63c82" + workspace_id = "573fecd4-7353-4f63-8820-9379aa69cd5f" } ``` diff --git a/docs/resources/source_mailchimp.md b/docs/resources/source_mailchimp.md index 05150a2db..b80c36148 100644 --- a/docs/resources/source_mailchimp.md +++ b/docs/resources/source_mailchimp.md @@ -24,9 +24,9 @@ resource "airbyte_source_mailchimp" "my_source_mailchimp" { } source_type = "mailchimp" } - name = "Faye Dicki" + name = "Benny Williamson" secret_id = "...my_secret_id..." - workspace_id = "aa69cd5f-bcf7-49da-98a7-822bf95894e6" + workspace_id = "da18a782-2bf9-4589-8e68-61adb55f9e5d" } ``` diff --git a/docs/resources/source_mailgun.md b/docs/resources/source_mailgun.md index 7dbd66a2f..c771986a8 100644 --- a/docs/resources/source_mailgun.md +++ b/docs/resources/source_mailgun.md @@ -20,9 +20,9 @@ resource "airbyte_source_mailgun" "my_source_mailgun" { source_type = "mailgun" start_date = "2023-08-01T00:00:00Z" } - name = "Lynda Schuppe" + name = "Sheri Mayert" secret_id = "...my_secret_id..." - workspace_id = "5f9e5d75-1c9f-4e8f-b502-bfdc3450841f" + workspace_id = "8f7502bf-dc34-4508-81f1-764456379f3f" } ``` diff --git a/docs/resources/source_mailjet_sms.md b/docs/resources/source_mailjet_sms.md index 6303aae02..80e5ef086 100644 --- a/docs/resources/source_mailjet_sms.md +++ b/docs/resources/source_mailjet_sms.md @@ -20,9 +20,9 @@ resource "airbyte_source_mailjet_sms" "my_source_mailjetsms" { start_date = 1666261656 token = "...my_token..." } - name = "Eleanor Gibson" + name = "Dr. Eloise Cronin" secret_id = "...my_secret_id..." - workspace_id = "379f3fb2-7e21-4f86-a657-b36fc6b9f587" + workspace_id = "62657b36-fc6b-49f5-87ce-525c67641a83" } ``` diff --git a/docs/resources/source_marketo.md b/docs/resources/source_marketo.md index 89d5da3af..f59260c4d 100644 --- a/docs/resources/source_marketo.md +++ b/docs/resources/source_marketo.md @@ -21,9 +21,9 @@ resource "airbyte_source_marketo" "my_source_marketo" { source_type = "marketo" start_date = "2020-09-25T00:00:00Z" } - name = "Sara Hegmann" + name = "Jerome Berge" secret_id = "...my_secret_id..." - workspace_id = "7641a831-2e50-447b-8c21-ccb423abcdc9" + workspace_id = "b4c21ccb-423a-4bcd-891f-aabdd88e71f6" } ``` diff --git a/docs/resources/source_metabase.md b/docs/resources/source_metabase.md index 01c32e95d..f6b805f97 100644 --- a/docs/resources/source_metabase.md +++ b/docs/resources/source_metabase.md @@ -19,11 +19,11 @@ resource "airbyte_source_metabase" "my_source_metabase" { password = "...my_password..." session_token = "...my_session_token..." source_type = "metabase" - username = "Audrey_Weimann71" + username = "Peyton.Green" } - name = "Jody Lebsack" + name = "Tammy Sporer" secret_id = "...my_secret_id..." 
- workspace_id = "71f6c482-52d7-4771-a7fd-074009ef8d29" + workspace_id = "71e7fd07-4009-4ef8-929d-e1dd7097b5da" } ``` diff --git a/docs/resources/source_microsoft_teams.md b/docs/resources/source_microsoft_teams.md index 90ad587a9..f7977bd25 100644 --- a/docs/resources/source_microsoft_teams.md +++ b/docs/resources/source_microsoft_teams.md @@ -26,9 +26,9 @@ resource "airbyte_source_microsoft_teams" "my_source_microsoftteams" { period = "D7" source_type = "microsoft-teams" } - name = "Noah Bernier" + name = "Brandy Ryan" secret_id = "...my_secret_id..." - workspace_id = "7097b5da-08c5-47fa-ac78-a216e19bafec" + workspace_id = "fa6c78a2-16e1-49ba-beca-6191498140b6" } ``` diff --git a/docs/resources/source_mixpanel.md b/docs/resources/source_mixpanel.md index e5a26fe72..4b56bde43 100644 --- a/docs/resources/source_mixpanel.md +++ b/docs/resources/source_mixpanel.md @@ -15,25 +15,25 @@ SourceMixpanel Resource ```terraform resource "airbyte_source_mixpanel" "my_source_mixpanel" { configuration = { - attribution_window = 7 + attribution_window = 2 credentials = { source_mixpanel_authentication_wildcard_project_secret = { api_secret = "...my_api_secret..." option_title = "Project Secret" } } - date_window_size = 4 + date_window_size = 10 end_date = "2021-11-16" - project_id = 0 - project_timezone = "US/Pacific" - region = "EU" + project_id = 7 + project_timezone = "UTC" + region = "US" select_properties_by_default = true source_type = "mixpanel" start_date = "2021-11-16" } - name = "Karla Hoppe" + name = "Donald Ernser" secret_id = "...my_secret_id..." - workspace_id = "f8ae170e-f03b-45f3-be4a-a86855596673" + workspace_id = "f37e4aa8-6855-4596-a732-aa5dcb6682cb" } ``` diff --git a/docs/resources/source_monday.md b/docs/resources/source_monday.md index a6818322d..ef5c56f9b 100644 --- a/docs/resources/source_monday.md +++ b/docs/resources/source_monday.md @@ -23,9 +23,9 @@ resource "airbyte_source_monday" "my_source_monday" { } source_type = "monday" } - name = "Lynda Padberg" + name = "Shirley Wisoky" secret_id = "...my_secret_id..." - workspace_id = "cb6682cb-70f8-4cfd-9fb6-e91b9a9f7484" + workspace_id = "fd5fb6e9-1b9a-49f7-8846-e2c3309db053" } ``` diff --git a/docs/resources/source_mongodb.md b/docs/resources/source_mongodb.md index 327eb47da..9b8bca0ff 100644 --- a/docs/resources/source_mongodb.md +++ b/docs/resources/source_mongodb.md @@ -27,9 +27,9 @@ resource "airbyte_source_mongodb" "my_source_mongodb" { source_type = "mongodb" user = "...my_user..." } - name = "Elvira Collins" + name = "Doreen Mayer" secret_id = "...my_secret_id..." - workspace_id = "309db053-6d9e-475c-a006-f5392c11a25a" + workspace_id = "5ca006f5-392c-411a-a5a8-bf92f97428ad" } ``` diff --git a/docs/resources/source_mongodb_internal_poc.md b/docs/resources/source_mongodb_internal_poc.md index f7cbed97d..c7044fa78 100644 --- a/docs/resources/source_mongodb_internal_poc.md +++ b/docs/resources/source_mongodb_internal_poc.md @@ -22,9 +22,9 @@ resource "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" { source_type = "mongodb-internal-poc" user = "...my_user..." } - name = "Dr. Cary McKenzie" + name = "Eduardo Weissnat" secret_id = "...my_secret_id..." 
- workspace_id = "7428ad9a-9f8b-4f82-a112-5359d98387f7" + workspace_id = "f8221125-359d-4983-87f7-a79cd72cd248" } ``` diff --git a/docs/resources/source_mssql.md b/docs/resources/source_mssql.md index 61ef87297..c8c1d5f2b 100644 --- a/docs/resources/source_mssql.md +++ b/docs/resources/source_mssql.md @@ -21,11 +21,11 @@ resource "airbyte_source_mssql" "my_source_mssql" { password = "...my_password..." port = 1433 replication_method = { - source_mssql_replication_method_logical_replication_cdc_ = { - data_to_sync = "Existing and New" - initial_waiting_seconds = 6 + source_mssql_update_method_read_changes_using_change_data_capture_cdc_ = { + data_to_sync = "New Changes Only" + initial_waiting_seconds = 7 method = "CDC" - snapshot_isolation = "Read Committed" + snapshot_isolation = "Snapshot" } } schemas = [ @@ -42,11 +42,11 @@ resource "airbyte_source_mssql" "my_source_mssql" { tunnel_method = "NO_TUNNEL" } } - username = "Rhianna75" + username = "Bobbie60" } - name = "Victor Gleason" + name = "Clarence Murazik" secret_id = "...my_secret_id..." - workspace_id = "da21729f-2ac4-41ef-9725-f1169ac1e41d" + workspace_id = "1ef5725f-1169-4ac1-a41d-8a23c23e34f2" } ``` @@ -83,7 +83,7 @@ Optional: - `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (example: key1=value1&key2=value2&key3=value3). - `password` (String) The password associated with the username. -- `replication_method` (Attributes) The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. (see [below for nested schema](#nestedatt--configuration--replication_method)) +- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method)) - `schemas` (List of String) The list of schemas to sync from. Defaults to user. Case sensitive. - `ssl_method` (Attributes) The encryption method which is used when communicating with the database. (see [below for nested schema](#nestedatt--configuration--ssl_method)) - `tunnel_method` (Attributes) Whether to initiate an SSH tunnel before connecting to the database, and if so, which kind of authentication to use. (see [below for nested schema](#nestedatt--configuration--tunnel_method)) @@ -93,13 +93,13 @@ Optional: Optional: -- `source_mssql_replication_method_logical_replication_cdc` (Attributes) CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_replication_method_logical_replication_cdc)) -- `source_mssql_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_replication_method_standard)) -- `source_mssql_update_replication_method_logical_replication_cdc` (Attributes) CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. 
(see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_replication_method_logical_replication_cdc)) -- `source_mssql_update_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_replication_method_standard)) +- `source_mssql_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_read_changes_using_change_data_capture_cdc)) +- `source_mssql_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_method_scan_changes_with_user_defined_cursor)) +- `source_mssql_update_update_method_read_changes_using_change_data_capture_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_read_changes_using_change_data_capture_cdc)) +- `source_mssql_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_mssql_update_update_method_scan_changes_with_user_defined_cursor)) - -### Nested Schema for `configuration.replication_method.source_mssql_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_mssql_update_method_read_changes_using_change_data_capture_cdc` Required: @@ -114,16 +114,16 @@ What data should be synced under the CDC. "Existing and New" will read existing Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. - -### Nested Schema for `configuration.replication_method.source_mssql_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_mssql_update_method_scan_changes_with_user_defined_cursor` Required: - `method` (String) must be one of ["STANDARD"] - -### Nested Schema for `configuration.replication_method.source_mssql_update_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_read_changes_using_change_data_capture_cdc` Required: @@ -138,8 +138,8 @@ What data should be synced under the CDC. "Existing and New" will read existing Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
- -### Nested Schema for `configuration.replication_method.source_mssql_update_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_mssql_update_update_method_scan_changes_with_user_defined_cursor` Required: diff --git a/docs/resources/source_my_hours.md b/docs/resources/source_my_hours.md index b54d87b97..0085bed7f 100644 --- a/docs/resources/source_my_hours.md +++ b/docs/resources/source_my_hours.md @@ -19,11 +19,11 @@ resource "airbyte_source_my_hours" "my_source_myhours" { logs_batch_size = 30 password = "...my_password..." source_type = "my-hours" - start_date = "%Y-%m-%d" + start_date = "2016-01-01" } - name = "Allen Grant" + name = "Elsa Kerluke" secret_id = "...my_secret_id..." - workspace_id = "dfa4a197-f6de-4922-951f-e1712099853e" + workspace_id = "922151fe-1712-4099-853e-9f543d854439" } ``` diff --git a/docs/resources/source_mysql.md b/docs/resources/source_mysql.md index d18895782..b945dfda0 100644 --- a/docs/resources/source_mysql.md +++ b/docs/resources/source_mysql.md @@ -38,11 +38,11 @@ resource "airbyte_source_mysql" "my_source_mysql" { tunnel_method = "NO_TUNNEL" } } - username = "Eusebio86" + username = "Carley25" } - name = "Herman Greenfelder" + name = "Ruth Goodwin" secret_id = "...my_secret_id..." - workspace_id = "9ee22446-0443-4bc1-9418-8c2f56e85da7" + workspace_id = "bc154188-c2f5-46e8-9da7-832eabd617c3" } ``` diff --git a/docs/resources/source_netsuite.md b/docs/resources/source_netsuite.md index 85bacd31c..5d9f80f52 100644 --- a/docs/resources/source_netsuite.md +++ b/docs/resources/source_netsuite.md @@ -25,11 +25,11 @@ resource "airbyte_source_netsuite" "my_source_netsuite" { start_datetime = "2017-01-25T00:00:00Z" token_key = "...my_token_key..." token_secret = "...my_token_secret..." - window_in_days = 5 + window_in_days = 7 } - name = "Judy Towne" + name = "Miss Meredith Hand" secret_id = "...my_secret_id..." - workspace_id = "d617c3b0-d51a-444b-b01b-ad8706d46082" + workspace_id = "4bf01bad-8706-4d46-882b-fbdc41ff5d4e" } ``` diff --git a/docs/resources/source_notion.md b/docs/resources/source_notion.md index a427a1c08..4bcf04c38 100644 --- a/docs/resources/source_notion.md +++ b/docs/resources/source_notion.md @@ -24,9 +24,9 @@ resource "airbyte_source_notion" "my_source_notion" { source_type = "notion" start_date = "2020-11-16T00:00:00.000Z" } - name = "Dewey Schmeler DVM" + name = "Francisco Yost" secret_id = "...my_secret_id..." - workspace_id = "f5d4e2ae-4fb5-4cb3-9d17-638f1edb7835" + workspace_id = "cb35d176-38f1-4edb-b835-9ecc5cb860f8" } ``` diff --git a/docs/resources/source_nytimes.md b/docs/resources/source_nytimes.md index f54582403..ff83b2189 100644 --- a/docs/resources/source_nytimes.md +++ b/docs/resources/source_nytimes.md @@ -17,14 +17,14 @@ resource "airbyte_source_nytimes" "my_source_nytimes" { configuration = { api_key = "...my_api_key..." end_date = "1851-01" - period = "30" + period = "7" share_type = "facebook" source_type = "nytimes" start_date = "2022-08" } - name = "Cecil Bauch" + name = "Mr. Emily Macejkovic" secret_id = "...my_secret_id..." 
- workspace_id = "cd580ba7-3810-4e4f-a444-7297cd3b1dd3" + workspace_id = "4fe44472-97cd-43b1-9d3b-bce247b7684e" } ``` diff --git a/docs/resources/source_okta.md b/docs/resources/source_okta.md index fbee0a763..818e96444 100644 --- a/docs/resources/source_okta.md +++ b/docs/resources/source_okta.md @@ -25,9 +25,9 @@ resource "airbyte_source_okta" "my_source_okta" { source_type = "okta" start_date = "2022-07-22T00:00:00Z" } - name = "Randolph Russel" + name = "Mr. Emmett Heidenreich" secret_id = "...my_secret_id..." - workspace_id = "47b7684e-ff50-4126-971c-ffbd0eb74b84" + workspace_id = "6d71cffb-d0eb-474b-8421-953b44bd3c43" } ``` diff --git a/docs/resources/source_omnisend.md b/docs/resources/source_omnisend.md index f60ea720a..f0b9461ad 100644 --- a/docs/resources/source_omnisend.md +++ b/docs/resources/source_omnisend.md @@ -18,9 +18,9 @@ resource "airbyte_source_omnisend" "my_source_omnisend" { api_key = "...my_api_key..." source_type = "omnisend" } - name = "Virginia Mitchell" + name = "Lynn Miller" secret_id = "...my_secret_id..." - workspace_id = "b44bd3c4-3159-4d33-a595-3c001139863a" + workspace_id = "3e5953c0-0113-4986-baa4-1e6c31cc2f1f" } ``` diff --git a/docs/resources/source_onesignal.md b/docs/resources/source_onesignal.md index 9a514a80b..e0b682980 100644 --- a/docs/resources/source_onesignal.md +++ b/docs/resources/source_onesignal.md @@ -27,9 +27,9 @@ resource "airbyte_source_onesignal" "my_source_onesignal" { start_date = "2020-11-16T00:00:00Z" user_auth_key = "...my_user_auth_key..." } - name = "Laverne Jacobs" + name = "Joan Schaefer" secret_id = "...my_secret_id..." - workspace_id = "1cc2f1fc-b51c-49a4-9ffb-e9cbd795ee65" + workspace_id = "41ffbe9c-bd79-45ee-a5e0-76cc7abf616e" } ``` diff --git a/docs/resources/source_openweather.md b/docs/resources/source_openweather.md deleted file mode 100644 index 26fb6605f..000000000 --- a/docs/resources/source_openweather.md +++ /dev/null @@ -1,66 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "airbyte_source_openweather Resource - terraform-provider-airbyte" -subcategory: "" -description: |- - SourceOpenweather Resource ---- - -# airbyte_source_openweather (Resource) - -SourceOpenweather Resource - -## Example Usage - -```terraform -resource "airbyte_source_openweather" "my_source_openweather" { - configuration = { - appid = "...my_appid..." - lang = "zh_tw" - lat = "45.7603" - lon = "4.835659" - source_type = "openweather" - units = "metric" - } - name = "Sylvester Krajcik" - secret_id = "...my_secret_id..." - workspace_id = "f616ea5c-7164-4193-8b90-f2e09d19d2fc" -} -``` - - -## Schema - -### Required - -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) -- `name` (String) -- `workspace_id` (String) - -### Optional - -- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow. - -### Read-Only - -- `source_id` (String) -- `source_type` (String) - - -### Nested Schema for `configuration` - -Required: - -- `appid` (String) Your OpenWeather API Key. See here. The key is case sensitive. -- `lat` (String) Latitude for which you want to get weather condition from. (min -90, max 90) -- `lon` (String) Longitude for which you want to get weather condition from. 
(min -180, max 180) -- `source_type` (String) must be one of ["openweather"] - -Optional: - -- `lang` (String) must be one of ["af", "al", "ar", "az", "bg", "ca", "cz", "da", "de", "el", "en", "eu", "fa", "fi", "fr", "gl", "he", "hi", "hr", "hu", "id", "it", "ja", "kr", "la", "lt", "mk", "no", "nl", "pl", "pt", "pt_br", "ro", "ru", "sv", "se", "sk", "sl", "sp", "es", "sr", "th", "tr", "ua", "uk", "vi", "zh_cn", "zh_tw", "zu"] -You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. -- `units` (String) must be one of ["standard", "metric", "imperial"] -Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. - - diff --git a/docs/resources/source_oracle.md b/docs/resources/source_oracle.md index b55648604..093d12f88 100644 --- a/docs/resources/source_oracle.md +++ b/docs/resources/source_oracle.md @@ -23,14 +23,14 @@ resource "airbyte_source_oracle" "my_source_oracle" { } encryption = { source_oracle_encryption_native_network_encryption_nne_ = { - encryption_algorithm = "AES256" + encryption_algorithm = "RC4_56" encryption_method = "client_nne" } } host = "...my_host..." jdbc_url_params = "...my_jdbc_url_params..." password = "...my_password..." - port = 10 + port = 4 schemas = [ "...", ] @@ -40,11 +40,11 @@ resource "airbyte_source_oracle" "my_source_oracle" { tunnel_method = "NO_TUNNEL" } } - username = "Lila92" + username = "Oswaldo42" } - name = "Barbara Hilll" + name = "Cheryl McKenzie" secret_id = "...my_secret_id..." - workspace_id = "4b935d23-7a72-4f90-849d-6aed4aecb753" + workspace_id = "b90f2e09-d19d-42fc-af9e-2e105944b935" } ``` diff --git a/docs/resources/source_orb.md b/docs/resources/source_orb.md index d812917df..d6144b287 100644 --- a/docs/resources/source_orb.md +++ b/docs/resources/source_orb.md @@ -16,7 +16,7 @@ SourceOrb Resource resource "airbyte_source_orb" "my_source_orb" { configuration = { api_key = "...my_api_key..." - lookback_window_days = 5 + lookback_window_days = 9 numeric_event_properties_keys = [ "...", ] @@ -28,9 +28,9 @@ resource "airbyte_source_orb" "my_source_orb" { ] subscription_usage_grouping_key = "...my_subscription_usage_grouping_key..." } - name = "Josh Mante" + name = "Josephine Kilback" secret_id = "...my_secret_id..." - workspace_id = "2c9ff574-91aa-4bfa-ae76-1f0ca4d456ef" + workspace_id = "2f90849d-6aed-44ae-8b75-37cd9222c9ff" } ``` diff --git a/docs/resources/source_orbit.md b/docs/resources/source_orbit.md index ec0f15afa..6f9cbf315 100644 --- a/docs/resources/source_orbit.md +++ b/docs/resources/source_orbit.md @@ -20,9 +20,9 @@ resource "airbyte_source_orbit" "my_source_orbit" { start_date = "...my_start_date..." workspace = "...my_workspace..." } - name = "Dr. Nancy Ferry" + name = "Jo Greenholt V" secret_id = "...my_secret_id..." - workspace_id = "899f0c20-01e2-42cd-95cc-0584a184d76d" + workspace_id = "abfa2e76-1f0c-4a4d-856e-f1031e6899f0" } ``` diff --git a/docs/resources/source_outbrain_amplify.md b/docs/resources/source_outbrain_amplify.md index af8f76dc1..18a19739c 100644 --- a/docs/resources/source_outbrain_amplify.md +++ b/docs/resources/source_outbrain_amplify.md @@ -22,14 +22,14 @@ resource "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" { } } end_date = "...my_end_date..." 
- geo_location_breakdown = "region" - report_granularity = "weekly" + geo_location_breakdown = "subregion" + report_granularity = "daily" source_type = "outbrain-amplify" start_date = "...my_start_date..." } - name = "Johanna Runolfsson Jr." + name = "Cynthia Boyer" secret_id = "...my_secret_id..." - workspace_id = "c65b037b-b8e0-4cc8-8518-7e4de04af28c" + workspace_id = "2cd55cc0-584a-4184-976d-971fc820c65b" } ``` diff --git a/docs/resources/source_outreach.md b/docs/resources/source_outreach.md index 557d8443b..585c858dc 100644 --- a/docs/resources/source_outreach.md +++ b/docs/resources/source_outreach.md @@ -22,9 +22,9 @@ resource "airbyte_source_outreach" "my_source_outreach" { source_type = "outreach" start_date = "2020-11-16T00:00:00Z" } - name = "Rosemarie Schulist" + name = "Kim Kirlin" secret_id = "...my_secret_id..." - workspace_id = "46aa1cfd-6d82-48da-8131-91129646645c" + workspace_id = "8e0cc885-187e-44de-84af-28c5dddb46aa" } ``` diff --git a/docs/resources/source_paypal_transaction.md b/docs/resources/source_paypal_transaction.md index 205c5fb84..d78081001 100644 --- a/docs/resources/source_paypal_transaction.md +++ b/docs/resources/source_paypal_transaction.md @@ -22,9 +22,9 @@ resource "airbyte_source_paypal_transaction" "my_source_paypaltransaction" { source_type = "paypal-transaction" start_date = "2021-06-11T23:59:59+00:00" } - name = "Mrs. Roy Moore" + name = "Ernestine Little" secret_id = "...my_secret_id..." - workspace_id = "f569b7af-f0ea-4221-acbe-071bc163e279" + workspace_id = "da013191-1296-4466-85c1-d81f29042f56" } ``` diff --git a/docs/resources/source_paystack.md b/docs/resources/source_paystack.md index d2619253b..68355ea78 100644 --- a/docs/resources/source_paystack.md +++ b/docs/resources/source_paystack.md @@ -15,14 +15,14 @@ SourcePaystack Resource ```terraform resource "airbyte_source_paystack" "my_source_paystack" { configuration = { - lookback_window_days = 7 + lookback_window_days = 6 secret_key = "...my_secret_key..." source_type = "paystack" start_date = "2017-01-25T00:00:00Z" } - name = "Mattie Gutkowski" + name = "Dr. Boyd Wilderman" secret_id = "...my_secret_id..." - workspace_id = "99257d04-f408-447a-b42d-84496cbdeecf" + workspace_id = "2216cbe0-71bc-4163-a279-a3b084da9925" } ``` diff --git a/docs/resources/source_pendo.md b/docs/resources/source_pendo.md index c5bfd9826..8f1f63acb 100644 --- a/docs/resources/source_pendo.md +++ b/docs/resources/source_pendo.md @@ -18,9 +18,9 @@ resource "airbyte_source_pendo" "my_source_pendo" { api_key = "...my_api_key..." source_type = "pendo" } - name = "Bridget Medhurst" + name = "Estelle Bechtelar" secret_id = "...my_secret_id..." - workspace_id = "c63562eb-fdf5-45c2-94c0-60b06a128776" + workspace_id = "40847a74-2d84-4496-8bde-ecf6b99bc635" } ``` diff --git a/docs/resources/source_persistiq.md b/docs/resources/source_persistiq.md index 5e044a857..aaa226c27 100644 --- a/docs/resources/source_persistiq.md +++ b/docs/resources/source_persistiq.md @@ -18,9 +18,9 @@ resource "airbyte_source_persistiq" "my_source_persistiq" { api_key = "...my_api_key..." source_type = "persistiq" } - name = "Silvia Toy" + name = "Nicole Vandervort" secret_id = "...my_secret_id..." 
- workspace_id = "d0c6d6ed-9c73-4dd6-b457-1509a8e870d3" + workspace_id = "df55c294-c060-4b06-a128-7764eef6d0c6" } ``` diff --git a/docs/resources/source_pexels_api.md b/docs/resources/source_pexels_api.md index cd8a62d0f..3f0f8b35a 100644 --- a/docs/resources/source_pexels_api.md +++ b/docs/resources/source_pexels_api.md @@ -19,13 +19,13 @@ resource "airbyte_source_pexels_api" "my_source_pexelsapi" { color = "orange" locale = "en-US" orientation = "landscape" - query = "people" + query = "oceans" size = "small" source_type = "pexels-api" } - name = "Wilbert Cummings" + name = "Arnold Dooley" secret_id = "...my_secret_id..." - workspace_id = "c7b66a1f-30c7-43df-9b67-19890f42a4bb" + workspace_id = "63457150-9a8e-4870-93c5-a1f9c242c7b6" } ``` diff --git a/docs/resources/source_pinterest.md b/docs/resources/source_pinterest.md index 45fd49b4c..6805e0477 100644 --- a/docs/resources/source_pinterest.md +++ b/docs/resources/source_pinterest.md @@ -24,12 +24,12 @@ resource "airbyte_source_pinterest" "my_source_pinterest" { source_type = "pinterest" start_date = "2022-07-28" status = [ - "PAUSED", + "ACTIVE", ] } - name = "Jordan Hegmann" + name = "Nathan Bauch" secret_id = "...my_secret_id..." - workspace_id = "60591d74-5e3c-4205-9c9c-3f567e0e2527" + workspace_id = "3df5b671-9890-4f42-a4bb-438d85b26059" } ``` diff --git a/docs/resources/source_pipedrive.md b/docs/resources/source_pipedrive.md index 865536664..0d844c962 100644 --- a/docs/resources/source_pipedrive.md +++ b/docs/resources/source_pipedrive.md @@ -22,9 +22,9 @@ resource "airbyte_source_pipedrive" "my_source_pipedrive" { replication_start_date = "2017-01-25T00:00:00Z" source_type = "pipedrive" } - name = "Albert Sipes" + name = "Rhonda Hammes" secret_id = "...my_secret_id..." - workspace_id = "fcdace1f-0121-46ce-a239-e8f25cd0d19d" + workspace_id = "c2059c9c-3f56-47e0-a252-765b1d62fcda" } ``` diff --git a/docs/resources/source_pocket.md b/docs/resources/source_pocket.md index 50884723a..740aec074 100644 --- a/docs/resources/source_pocket.md +++ b/docs/resources/source_pocket.md @@ -17,8 +17,8 @@ resource "airbyte_source_pocket" "my_source_pocket" { configuration = { access_token = "...my_access_token..." consumer_key = "...my_consumer_key..." - content_type = "video" - detail_type = "simple" + content_type = "image" + detail_type = "complete" domain = "...my_domain..." favorite = true search = "...my_search..." @@ -28,9 +28,9 @@ resource "airbyte_source_pocket" "my_source_pocket" { state = "unread" tag = "...my_tag..." } - name = "Ada Tromp" + name = "Christina Bode" secret_id = "...my_secret_id..." - workspace_id = "266cbd95-f7aa-42b2-8113-695d1e6698fc" + workspace_id = "e2239e8f-25cd-40d1-9d95-9f439e39266c" } ``` diff --git a/docs/resources/source_pokeapi.md b/docs/resources/source_pokeapi.md index 731548269..209cba1b0 100644 --- a/docs/resources/source_pokeapi.md +++ b/docs/resources/source_pokeapi.md @@ -18,9 +18,9 @@ resource "airbyte_source_pokeapi" "my_source_pokeapi" { pokemon_name = "snorlax" source_type = "pokeapi" } - name = "Lynn Mertz" + name = "Jeremiah Hahn" secret_id = "...my_secret_id..." 
- workspace_id = "17c29776-7633-4425-8038-bfb5971e9819" + workspace_id = "aa2b2411-3695-4d1e-a698-fcc4596217c2" } ``` diff --git a/docs/resources/source_polygon_stock_api.md b/docs/resources/source_polygon_stock_api.md index bf0b88e11..5a80bed58 100644 --- a/docs/resources/source_polygon_stock_api.md +++ b/docs/resources/source_polygon_stock_api.md @@ -15,20 +15,20 @@ SourcePolygonStockAPI Resource ```terraform resource "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" { configuration = { - adjusted = "true" + adjusted = "false" api_key = "...my_api_key..." end_date = "2020-10-14" limit = 100 - multiplier = 2 - sort = "desc" + multiplier = 1 + sort = "asc" source_type = "polygon-stock-api" start_date = "2020-10-14" - stocks_ticker = "MSFT" + stocks_ticker = "IBM" timespan = "day" } - name = "Spencer Kirlin" + name = "Mary Fisher" secret_id = "...my_secret_id..." - workspace_id = "a39594d6-6bc2-4ae4-8063-2b9954b6fa22" + workspace_id = "fb5971e9-8190-4557-b89c-edbac7fda395" } ``` diff --git a/docs/resources/source_postgres.md b/docs/resources/source_postgres.md index 9855bc918..304d0512c 100644 --- a/docs/resources/source_postgres.md +++ b/docs/resources/source_postgres.md @@ -21,14 +21,8 @@ resource "airbyte_source_postgres" "my_source_postgres" { password = "...my_password..." port = 5432 replication_method = { - source_postgres_replication_method_logical_replication_cdc_ = { - initial_waiting_seconds = 4 - lsn_commit_behaviour = "While reading Data" - method = "CDC" - plugin = "pgoutput" - publication = "...my_publication..." - queue_size = 4 - replication_slot = "...my_replication_slot..." + source_postgres_update_method_detect_changes_with_xmin_system_column = { + method = "Xmin" } } schemas = [ @@ -45,11 +39,11 @@ resource "airbyte_source_postgres" "my_source_postgres" { tunnel_method = "NO_TUNNEL" } } - username = "Kendrick52" + username = "Edwardo.Streich" } - name = "Vivian Dietrich" + name = "Roosevelt Cummings" secret_id = "...my_secret_id..." - workspace_id = "10006bef-4921-4ec2-853b-749366ac8ee0" + workspace_id = "480632b9-954b-46fa-a206-369828553cb1" } ``` @@ -86,7 +80,7 @@ Optional: - `jdbc_url_params` (String) Additional properties to pass to the JDBC URL string when connecting to the database formatted as 'key=value' pairs separated by the symbol '&'. (Eg. key1=value1&key2=value2&key3=value3). For more information read about JDBC URL parameters. - `password` (String) Password associated with the username. -- `replication_method` (Attributes) Replication method for extracting data from the database. (see [below for nested schema](#nestedatt--configuration--replication_method)) +- `replication_method` (Attributes) Configures how data is extracted from the database. (see [below for nested schema](#nestedatt--configuration--replication_method)) - `schemas` (List of String) The list of schemas (case sensitive) to sync from. Defaults to public. - `ssl_mode` (Attributes) SSL connection modes. Read more in the docs. (see [below for nested schema](#nestedatt--configuration--ssl_mode)) @@ -97,15 +91,23 @@ Optional: Optional: -- `source_postgres_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. 
(see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_replication_method_logical_replication_cdc)) -- `source_postgres_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_replication_method_standard)) -- `source_postgres_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_replication_method_standard_xmin)) -- `source_postgres_update_replication_method_logical_replication_cdc` (Attributes) Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_replication_method_logical_replication_cdc)) -- `source_postgres_update_replication_method_standard` (Attributes) Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_replication_method_standard)) -- `source_postgres_update_replication_method_standard_xmin` (Attributes) Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_replication_method_standard_xmin)) +- `source_postgres_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_detect_changes_with_xmin_system_column)) +- `source_postgres_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_read_changes_using_write_ahead_log_cdc)) +- `source_postgres_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_method_scan_changes_with_user_defined_cursor)) +- `source_postgres_update_update_method_detect_changes_with_xmin_system_column` (Attributes) Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. 
(see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_detect_changes_with_xmin_system_column)) +- `source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc` (Attributes) Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc)) +- `source_postgres_update_update_method_scan_changes_with_user_defined_cursor` (Attributes) Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). (see [below for nested schema](#nestedatt--configuration--replication_method--source_postgres_update_update_method_scan_changes_with_user_defined_cursor)) - -### Nested Schema for `configuration.replication_method.source_postgres_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_postgres_update_method_detect_changes_with_xmin_system_column` + +Required: + +- `method` (String) must be one of ["Xmin"] + + + +### Nested Schema for `configuration.replication_method.source_postgres_update_method_read_changes_using_write_ahead_log_cdc` Required: @@ -124,24 +126,24 @@ A logical decoding plugin installed on the PostgreSQL server. - `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful. - -### Nested Schema for `configuration.replication_method.source_postgres_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_postgres_update_method_scan_changes_with_user_defined_cursor` Required: - `method` (String) must be one of ["Standard"] - -### Nested Schema for `configuration.replication_method.source_postgres_replication_method_standard_xmin` + +### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_detect_changes_with_xmin_system_column` Required: - `method` (String) must be one of ["Xmin"] - -### Nested Schema for `configuration.replication_method.source_postgres_update_replication_method_logical_replication_cdc` + +### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc` Required: @@ -160,22 +162,14 @@ A logical decoding plugin installed on the PostgreSQL server. - `queue_size` (Number) The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful. 
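For comparison with the Xmin example at the top of this page, here is a rough sketch of the same resource wired up with the write-ahead log (CDC) update method described above. All connection details, the publication and slot names, and the workspace_id are placeholders, and the variant key name is assumed to mirror the schema attribute listed in this hunk.

```terraform
# Hypothetical sketch: Postgres source using logical replication (CDC).
# Every value below is a placeholder; the publication and replication slot
# must already exist on the source database.
resource "airbyte_source_postgres" "cdc_example" {
  configuration = {
    host     = "postgres.example.internal"
    port     = 5432
    database = "analytics"
    username = "airbyte_reader"
    password = "...my_password..."
    replication_method = {
      # Key name assumed from the nested schema attribute documented above.
      source_postgres_update_method_read_changes_using_write_ahead_log_cdc = {
        method                  = "CDC"
        plugin                  = "pgoutput"
        publication             = "...my_publication..."
        replication_slot        = "...my_replication_slot..."
        initial_waiting_seconds = 300
        lsn_commit_behaviour    = "While reading Data"
      }
    }
    source_type = "postgres"
  }
  name         = "postgres-cdc-example"
  secret_id    = "...my_secret_id..."
  workspace_id = "00000000-0000-0000-0000-000000000000"
}
```

Logical replication has to be configured on the source database itself (a publication and a replication slot), as the description above notes.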
- -### Nested Schema for `configuration.replication_method.source_postgres_update_replication_method_standard` + +### Nested Schema for `configuration.replication_method.source_postgres_update_update_method_scan_changes_with_user_defined_cursor` Required: - `method` (String) must be one of ["Standard"] - -### Nested Schema for `configuration.replication_method.source_postgres_update_replication_method_standard_xmin` - -Required: - -- `method` (String) must be one of ["Xmin"] - - ### Nested Schema for `configuration.ssl_mode` diff --git a/docs/resources/source_posthog.md b/docs/resources/source_posthog.md index 8b51dae83..27fbc1d87 100644 --- a/docs/resources/source_posthog.md +++ b/docs/resources/source_posthog.md @@ -15,14 +15,15 @@ SourcePosthog Resource ```terraform resource "airbyte_source_posthog" "my_source_posthog" { configuration = { - api_key = "...my_api_key..." - base_url = "https://posthog.example.com" - source_type = "posthog" - start_date = "2021-01-01T00:00:00Z" + api_key = "...my_api_key..." + base_url = "https://posthog.example.com" + events_time_step = 30 + source_type = "posthog" + start_date = "2021-01-01T00:00:00Z" } - name = "Darrin Bogisich" + name = "Terence Wisozk" secret_id = "...my_secret_id..." - workspace_id = "88d40d03-f3de-4ba2-97be-3e90bc40df86" + workspace_id = "21ec2053-b749-4366-ac8e-e0f2bf19588d" } ``` @@ -56,5 +57,6 @@ Required: Optional: - `base_url` (String) Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com). +- `events_time_step` (Number) Set lower value in case of failing long running sync of events stream. diff --git a/docs/resources/source_postmarkapp.md b/docs/resources/source_postmarkapp.md index 4cd772af5..e9925ed62 100644 --- a/docs/resources/source_postmarkapp.md +++ b/docs/resources/source_postmarkapp.md @@ -19,9 +19,9 @@ resource "airbyte_source_postmarkapp" "my_source_postmarkapp" { x_postmark_account_token = "...my_x_postmark_account_token..." x_postmark_server_token = "...my_x_postmark_server_token..." } - name = "Boyd Stoltenberg" + name = "Mr. Sharon Swift" secret_id = "...my_secret_id..." - workspace_id = "405cb331-d492-4f4f-927f-b0e0bf1f8217" + workspace_id = "3deba297-be3e-490b-840d-f868fd52405c" } ``` diff --git a/docs/resources/source_prestashop.md b/docs/resources/source_prestashop.md index d88825452..bfcc11bc1 100644 --- a/docs/resources/source_prestashop.md +++ b/docs/resources/source_prestashop.md @@ -20,9 +20,9 @@ resource "airbyte_source_prestashop" "my_source_prestashop" { start_date = "2022-01-01" url = "...my_url..." } - name = "Drew Adams" + name = "Evelyn Stracke" secret_id = "...my_secret_id..." - workspace_id = "ca77aeb7-b702-41a5-a046-b64e99fb0e67" + workspace_id = "2f4f127f-b0e0-4bf1-b821-7978d0acca77" } ``` diff --git a/docs/resources/source_public_apis.md b/docs/resources/source_public_apis.md deleted file mode 100644 index f4bb46817..000000000 --- a/docs/resources/source_public_apis.md +++ /dev/null @@ -1,51 +0,0 @@ ---- -# generated by https://github.com/hashicorp/terraform-plugin-docs -page_title: "airbyte_source_public_apis Resource - terraform-provider-airbyte" -subcategory: "" -description: |- - SourcePublicApis Resource ---- - -# airbyte_source_public_apis (Resource) - -SourcePublicApis Resource - -## Example Usage - -```terraform -resource "airbyte_source_public_apis" "my_source_publicapis" { - configuration = { - source_type = "public-apis" - } - name = "James McLaughlin" - secret_id = "...my_secret_id..." 
- workspace_id = "dfed5540-ef53-4a34-a1b8-fe99731adc05" -} -``` - - -## Schema - -### Required - -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) -- `name` (String) -- `workspace_id` (String) - -### Optional - -- `secret_id` (String) Optional secretID obtained through the public API OAuth redirect flow. - -### Read-Only - -- `source_id` (String) -- `source_type` (String) - - -### Nested Schema for `configuration` - -Required: - -- `source_type` (String) must be one of ["public-apis"] - - diff --git a/docs/resources/source_punk_api.md b/docs/resources/source_punk_api.md index cd4febe1b..33fb6c703 100644 --- a/docs/resources/source_punk_api.md +++ b/docs/resources/source_punk_api.md @@ -20,9 +20,9 @@ resource "airbyte_source_punk_api" "my_source_punkapi" { id = 22 source_type = "punk-api" } - name = "Santos Langosh" + name = "Darnell Turcotte" secret_id = "...my_secret_id..." - workspace_id = "51bd76ee-eb51-48c4-9a1f-ad35512f06d4" + workspace_id = "540ef53a-34a1-4b8f-a997-31adc05d85ae" } ``` diff --git a/docs/resources/source_pypi.md b/docs/resources/source_pypi.md index 0f270a205..a6b79c0f9 100644 --- a/docs/resources/source_pypi.md +++ b/docs/resources/source_pypi.md @@ -19,9 +19,9 @@ resource "airbyte_source_pypi" "my_source_pypi" { source_type = "pypi" version = "1.2.0" } - name = "Bill Reichert" + name = "Antonia Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "f0f54856-8a04-424e-80a1-d6eb9434645d" + workspace_id = "0fb38742-90d3-4365-a1ec-a16ef89451bd" } ``` diff --git a/docs/resources/source_qualaroo.md b/docs/resources/source_qualaroo.md index 4058d96d5..6659a5d39 100644 --- a/docs/resources/source_qualaroo.md +++ b/docs/resources/source_qualaroo.md @@ -23,9 +23,9 @@ resource "airbyte_source_qualaroo" "my_source_qualaroo" { ] token = "...my_token..." } - name = "Grace Ankunding" + name = "Sue Thompson" secret_id = "...my_secret_id..." - workspace_id = "fbba5cce-ff5c-4b01-be51-e528a45ac82b" + workspace_id = "b518c4da-1fad-4355-92f0-6d4e5b72f0f5" } ``` diff --git a/docs/resources/source_quickbooks.md b/docs/resources/source_quickbooks.md index 4ec74ae2e..c23c86ec2 100644 --- a/docs/resources/source_quickbooks.md +++ b/docs/resources/source_quickbooks.md @@ -23,16 +23,16 @@ resource "airbyte_source_quickbooks" "my_source_quickbooks" { client_secret = "...my_client_secret..." realm_id = "...my_realm_id..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2022-04-17T11:28:41.720Z" + token_expiry_date = "2022-06-15T23:02:57.447Z" } } sandbox = false source_type = "quickbooks" start_date = "2021-03-20T00:00:00Z" } - name = "Nicholas Schroeder" + name = "William Gottlieb" secret_id = "...my_secret_id..." - workspace_id = "a8da4127-dd59-47ff-8711-aa1bc74b86ce" + workspace_id = "e00a1d6e-b943-4464-9d03-084fbba5ccef" } ``` diff --git a/docs/resources/source_railz.md b/docs/resources/source_railz.md index 34a41755f..09a7c18fa 100644 --- a/docs/resources/source_railz.md +++ b/docs/resources/source_railz.md @@ -20,9 +20,9 @@ resource "airbyte_source_railz" "my_source_railz" { source_type = "railz" start_date = "...my_start_date..." } - name = "Myron Kunze" + name = "Clyde Schmeler Jr." secret_id = "...my_secret_id..." 
- workspace_id = "77b4848b-d6a6-4f04-81d2-c3b808094373" + workspace_id = "fe51e528-a45a-4c82-b85f-8bc2caba8da4" } ``` diff --git a/docs/resources/source_recharge.md b/docs/resources/source_recharge.md index 468af4038..c3ab7a31b 100644 --- a/docs/resources/source_recharge.md +++ b/docs/resources/source_recharge.md @@ -19,9 +19,9 @@ resource "airbyte_source_recharge" "my_source_recharge" { source_type = "recharge" start_date = "2021-05-14T00:00:00Z" } - name = "Karen Green" + name = "Angel Stokes" secret_id = "...my_secret_id..." - workspace_id = "bebbad02-f258-46bc-b152-558daa95be6c" + workspace_id = "7ff4711a-a1bc-474b-86ce-cc74f77b4848" } ``` diff --git a/docs/resources/source_recreation.md b/docs/resources/source_recreation.md index afc7bbc50..9b923b0f5 100644 --- a/docs/resources/source_recreation.md +++ b/docs/resources/source_recreation.md @@ -19,9 +19,9 @@ resource "airbyte_source_recreation" "my_source_recreation" { query_campsites = "...my_query_campsites..." source_type = "recreation" } - name = "David Deckow" + name = "Taylor Kertzmann" secret_id = "...my_secret_id..." - workspace_id = "6c354aa4-32b4-47e1-b63c-5208c23e9802" + workspace_id = "f0441d2c-3b80-4809-8373-e060459bebba" } ``` diff --git a/docs/resources/source_recruitee.md b/docs/resources/source_recruitee.md index 49ab743b1..ff172c647 100644 --- a/docs/resources/source_recruitee.md +++ b/docs/resources/source_recruitee.md @@ -19,9 +19,9 @@ resource "airbyte_source_recruitee" "my_source_recruitee" { company_id = 9 source_type = "recruitee" } - name = "Dr. Sean Williamson" + name = "Mrs. Tina White" secret_id = "...my_secret_id..." - workspace_id = "5eb4a8b6-74ee-45cf-818e-dc7f787e32e0" + workspace_id = "6bcf1525-58da-4a95-be6c-d02756c354aa" } ``` diff --git a/docs/resources/source_recurly.md b/docs/resources/source_recurly.md index 69f3187a6..4d6e8253e 100644 --- a/docs/resources/source_recurly.md +++ b/docs/resources/source_recurly.md @@ -20,9 +20,9 @@ resource "airbyte_source_recurly" "my_source_recurly" { end_time = "2021-12-01T00:00:00" source_type = "recurly" } - name = "Susie Donnelly" + name = "Josephine Dibbert" secret_id = "...my_secret_id..." - workspace_id = "ed0c5670-ef42-4bd3-89f1-cc503f6c39bc" + workspace_id = "7e1763c5-208c-423e-9802-d82f0d45eb4a" } ``` diff --git a/docs/resources/source_redshift.md b/docs/resources/source_redshift.md index 2d5ae04f8..8a38f454e 100644 --- a/docs/resources/source_redshift.md +++ b/docs/resources/source_redshift.md @@ -24,11 +24,11 @@ resource "airbyte_source_redshift" "my_source_redshift" { "...", ] source_type = "redshift" - username = "Abdullah.Oberbrunner" + username = "Nelda.Jaskolski" } - name = "Edward Wolf" + name = "Clay Hintz" secret_id = "...my_secret_id..." - workspace_id = "7f385189-ad7e-4f80-baae-03f33ca79fb9" + workspace_id = "c18edc7f-787e-432e-84b3-d3ed0c5670ef" } ``` diff --git a/docs/resources/source_retently.md b/docs/resources/source_retently.md index 8803c7123..6cf7842b1 100644 --- a/docs/resources/source_retently.md +++ b/docs/resources/source_retently.md @@ -25,9 +25,9 @@ resource "airbyte_source_retently" "my_source_retently" { } source_type = "retently" } - name = "Mrs. Lamar Fritsch" + name = "Kelly Pfeffer" secret_id = "...my_secret_id..." 
- workspace_id = "ba26fd36-8ba9-4216-bcb4-15835c736417" + workspace_id = "c9f1cc50-3f6c-439b-8d0a-6290f957f385" } ``` diff --git a/docs/resources/source_rki_covid.md b/docs/resources/source_rki_covid.md index bcbdc1a11..9f0c9d8c2 100644 --- a/docs/resources/source_rki_covid.md +++ b/docs/resources/source_rki_covid.md @@ -18,9 +18,9 @@ resource "airbyte_source_rki_covid" "my_source_rkicovid" { source_type = "rki-covid" start_date = "...my_start_date..." } - name = "Diana Bogisich" + name = "Penny Morissette" secret_id = "...my_secret_id..." - workspace_id = "edc046bc-5163-4bbc-a492-27c42c22c553" + workspace_id = "7ef807aa-e03f-433c-a79f-b9de4032ba26" } ``` diff --git a/docs/resources/source_rss.md b/docs/resources/source_rss.md index 56465d919..dfb2fe4b5 100644 --- a/docs/resources/source_rss.md +++ b/docs/resources/source_rss.md @@ -18,9 +18,9 @@ resource "airbyte_source_rss" "my_source_rss" { source_type = "rss" url = "...my_url..." } - name = "Donna Gottlieb" + name = "Gustavo Donnelly" secret_id = "...my_secret_id..." - workspace_id = "c5dbb3c5-7c1e-4498-9e8a-a257ddc1912e" + workspace_id = "ba9216bc-b415-4835-8736-41723133edc0" } ``` diff --git a/docs/resources/source_s3.md b/docs/resources/source_s3.md index d0ad5e70b..de1bd4527 100644 --- a/docs/resources/source_s3.md +++ b/docs/resources/source_s3.md @@ -15,13 +15,17 @@ SourceS3 Resource ```terraform resource "airbyte_source_s3" "my_source_s3" { configuration = { - dataset = "...my_dataset..." + aws_access_key_id = "...my_aws_access_key_id..." + aws_secret_access_key = "...my_aws_secret_access_key..." + bucket = "...my_bucket..." + dataset = "...my_dataset..." + endpoint = "...my_endpoint..." format = { source_s3_file_format_avro = { filetype = "avro" } } - path_pattern = "myFolder/myTableFiles/*.csv|myFolder/myOtherTableFiles/*.csv" + path_pattern = "**" provider = { aws_access_key_id = "...my_aws_access_key_id..." aws_secret_access_key = "...my_aws_secret_access_key..." @@ -32,10 +36,32 @@ resource "airbyte_source_s3" "my_source_s3" { } schema = "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}" source_type = "s3" + start_date = "2021-01-01T00:00:00.000000Z" + streams = [ + { + days_to_sync_if_history_is_full = 1 + file_type = "...my_file_type..." + format = { + source_s3_file_based_stream_config_format_avro_format = { + double_as_string = true + filetype = "avro" + } + } + globs = [ + "...", + ] + input_schema = "...my_input_schema..." + legacy_prefix = "...my_legacy_prefix..." + name = "Flora Rempel" + primary_key = "...my_primary_key..." + schemaless = false + validation_policy = "Skip Record" + }, + ] } - name = "Ellen Reilly" + name = "Jacqueline Kiehn" secret_id = "...my_secret_id..." - workspace_id = "c5469d40-15df-4a79-a206-bef2b0a3e42c" + workspace_id = "2c22c553-5049-45c5-9bb3-c57c1e4981e8" } ``` @@ -44,7 +70,8 @@ resource "airbyte_source_s3" "my_source_s3" { ### Required -- `configuration` (Attributes) (see [below for nested schema](#nestedatt--configuration)) +- `configuration` (Attributes) NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes +because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. 
(see [below for nested schema](#nestedatt--configuration)) - `name` (String) - `workspace_id` (String) @@ -62,30 +89,228 @@ resource "airbyte_source_s3" "my_source_s3" { Required: -- `dataset` (String) The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. -- `path_pattern` (String) A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. -- `provider` (Attributes) Use this to load files from S3 or S3-compatible services (see [below for nested schema](#nestedatt--configuration--provider)) +- `bucket` (String) Name of the S3 bucket where the file(s) exist. - `source_type` (String) must be one of ["s3"] +- `streams` (Attributes List) Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. (see [below for nested schema](#nestedatt--configuration--streams)) Optional: -- `format` (Attributes) The format of the files you'd like to replicate (see [below for nested schema](#nestedatt--configuration--format)) -- `schema` (String) Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. +- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. +- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. +- `dataset` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. +- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS. +- `format` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate (see [below for nested schema](#nestedatt--configuration--format)) +- `path_pattern` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. +- `provider` (Attributes) Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services (see [below for nested schema](#nestedatt--configuration--provider)) +- `schema` (String) Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. 
Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. +- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. - -### Nested Schema for `configuration.provider` + +### Nested Schema for `configuration.streams` Required: -- `bucket` (String) Name of the S3 bucket where the file(s) exist. +- `file_type` (String) The data file type that is being extracted for a stream. +- `name` (String) The name of the stream. Optional: -- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. -- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. -- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS. -- `path_prefix` (String) By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate. -- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated. +- `days_to_sync_if_history_is_full` (Number) When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. +- `format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format)) +- `globs` (List of String) The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. +- `input_schema` (String) The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. +- `legacy_prefix` (String) The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. +- `primary_key` (String) The column or columns (for a composite key) that serves as the unique identifier of a record. +- `schemaless` (Boolean) When enabled, syncs will not validate or structure records against the stream's schema. +- `validation_policy` (String) must be one of ["Emit Record", "Skip Record", "Wait for Discover"] +The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. + + +### Nested Schema for `configuration.streams.format` + +Optional: + +- `source_s3_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_avro_format)) +- `source_s3_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
(see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_csv_format)) +- `source_s3_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_jsonl_format)) +- `source_s3_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_file_based_stream_config_format_parquet_format)) +- `source_s3_update_file_based_stream_config_format_avro_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_avro_format)) +- `source_s3_update_file_based_stream_config_format_csv_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_csv_format)) +- `source_s3_update_file_based_stream_config_format_jsonl_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_jsonl_format)) +- `source_s3_update_file_based_stream_config_format_parquet_format` (Attributes) The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format)) + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss of precision when handling floating point numbers. +- `filetype` (String) must be one of ["avro"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. +- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data. +- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. +- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank. +- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values. +- `filetype` (String) must be one of ["csv"] +- `header_definition` (Attributes) How headers will be defined.
`User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition)) +- `inference_type` (String) must be one of ["None", "Primitive Types Only"] +How to infer the types of the columns. If none, inference default to strings. +- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. +- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank. +- `skip_rows_after_header` (Number) The number of rows to skip after the header row. +- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. +- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. +- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values. + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition` + +Optional: + +- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated)) +- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv)) +- `source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. 
`User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided)) + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Optional: + +- `header_definition_type` (String) must be one of ["Autogenerated"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Optional: + +- `header_definition_type` (String) must be one of ["From CSV"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Required: + +- `column_names` (List of String) The column names that will be used while emitting the CSV records + +Optional: + +- `header_definition_type` (String) must be one of ["User Provided"] + + + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `filetype` (String) must be one of ["jsonl"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. +- `filetype` (String) must be one of ["parquet"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `double_as_string` (Boolean) Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. +- `filetype` (String) must be one of ["avro"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `delimiter` (String) The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. +- `double_quote` (Boolean) Whether two quotes in a quoted CSV value denote a single quote in the data. +- `encoding` (String) The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. +- `escape_char` (String) The character used for escaping special characters. To disallow escaping, leave this field blank. +- `false_values` (List of String) A set of case-sensitive strings that should be interpreted as false values. 
+- `filetype` (String) must be one of ["csv"] +- `header_definition` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition)) +- `inference_type` (String) must be one of ["None", "Primitive Types Only"] +How to infer the types of the columns. If none, inference default to strings. +- `null_values` (List of String) A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. +- `quote_char` (String) The character used for quoting CSV values. To disallow quoting, make this field blank. +- `skip_rows_after_header` (Number) The number of rows to skip after the header row. +- `skip_rows_before_header` (Number) The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. +- `strings_can_be_null` (Boolean) Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. +- `true_values` (List of String) A set of case-sensitive strings that should be interpreted as true values. + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition` + +Optional: + +- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated)) +- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
(see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv)) +- `source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` (Attributes) How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided, and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using `f{i}`, where `i` is the index starting from 0. Otherwise, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV that has headers, they can skip rows. (see [below for nested schema](#nestedatt--configuration--streams--format--source_s3_update_file_based_stream_config_format_parquet_format--header_definition--source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided)) + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated` + +Optional: + +- `header_definition_type` (String) must be one of ["Autogenerated"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv` + +Optional: + +- `header_definition_type` (String) must be one of ["From CSV"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format.header_definition.source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided` + +Required: + +- `column_names` (List of String) The column names that will be used while emitting the CSV records. + +Optional: + +- `header_definition_type` (String) must be one of ["User Provided"] + + + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `filetype` (String) must be one of ["jsonl"] + + + +### Nested Schema for `configuration.streams.format.source_s3_update_file_based_stream_config_format_parquet_format` + +Optional: + +- `decimal_as_float` (Boolean) Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. +- `filetype` (String) must be one of ["parquet"] + + @@ -200,3 +425,17 @@ Optional: - `filetype` (String) must be one of ["parquet"] + + +### Nested Schema for `configuration.provider` + +Optional: + +- `aws_access_key_id` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. +- `aws_secret_access_key` (String) In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. +- `bucket` (String) Name of the S3 bucket where the file(s) exist. +- `endpoint` (String) Endpoint to an S3 compatible service. Leave empty to use AWS. +- `path_prefix` (String) By providing a path-like prefix (e.g.
myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate. +- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated. + + diff --git a/docs/resources/source_salesforce.md b/docs/resources/source_salesforce.md index 6f9d09467..d3d243efe 100644 --- a/docs/resources/source_salesforce.md +++ b/docs/resources/source_salesforce.md @@ -19,20 +19,20 @@ resource "airbyte_source_salesforce" "my_source_salesforce" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." force_use_bulk_api = true - is_sandbox = true + is_sandbox = false refresh_token = "...my_refresh_token..." source_type = "salesforce" - start_date = "2021-07-25T00:00:00Z" + start_date = "2021-07-25" streams_criteria = [ { - criteria = "starts with" + criteria = "not contains" value = "...my_value..." }, ] } - name = "Andy Paucek" + name = "Gregg Boyer Sr." secret_id = "...my_secret_id..." - workspace_id = "2e913558-6d18-4f9f-97a4-bfad2bf7d67c" + workspace_id = "ebde64bf-cc54-469d-8015-dfa796206bef" } ``` diff --git a/docs/resources/source_salesloft.md b/docs/resources/source_salesloft.md index 259e57185..a90fdd6c6 100644 --- a/docs/resources/source_salesloft.md +++ b/docs/resources/source_salesloft.md @@ -24,9 +24,9 @@ resource "airbyte_source_salesloft" "my_source_salesloft" { source_type = "salesloft" start_date = "2020-11-16T00:00:00Z" } - name = "Amelia Simonis" + name = "Lynda Dicki" secret_id = "...my_secret_id..." - workspace_id = "b41d6124-3531-4870-8f68-b03ad421bd43" + workspace_id = "2c1aa010-e9aa-4c2e-9135-586d18f9f97a" } ``` diff --git a/docs/resources/source_sap_fieldglass.md b/docs/resources/source_sap_fieldglass.md index 34669a855..5e2a052ca 100644 --- a/docs/resources/source_sap_fieldglass.md +++ b/docs/resources/source_sap_fieldglass.md @@ -18,9 +18,9 @@ resource "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" { api_key = "...my_api_key..." source_type = "sap-fieldglass" } - name = "Miss Jack Wintheiser" + name = "Juana Williamson" secret_id = "...my_secret_id..." - workspace_id = "0a0003eb-22d9-4b3a-b0d9-4faa741c57d1" + workspace_id = "2bf7d67c-a84a-4d99-b41d-61243531870c" } ``` diff --git a/docs/resources/source_secoda.md b/docs/resources/source_secoda.md index 7088f6615..14e946432 100644 --- a/docs/resources/source_secoda.md +++ b/docs/resources/source_secoda.md @@ -18,9 +18,9 @@ resource "airbyte_source_secoda" "my_source_secoda" { api_key = "...my_api_key..." source_type = "secoda" } - name = "Ignacio Sporer" + name = "Brett Leannon I" secret_id = "...my_secret_id..." - workspace_id = "050d38dc-3ce1-4854-b2f9-ee69166a8be3" + workspace_id = "ad421bd4-3d1f-40cb-8a00-03eb22d9b3a7" } ``` diff --git a/docs/resources/source_sendgrid.md b/docs/resources/source_sendgrid.md index 7a89d99f8..8ec44ae9f 100644 --- a/docs/resources/source_sendgrid.md +++ b/docs/resources/source_sendgrid.md @@ -19,9 +19,9 @@ resource "airbyte_source_sendgrid" "my_source_sendgrid" { source_type = "sendgrid" start_time = "2020-01-01T01:01:01Z" } - name = "Dana Sauer" + name = "Shari Pfannerstill" secret_id = "...my_secret_id..." 
- workspace_id = "3a2875c6-c1fe-4606-907d-2a9c87ae50c1" + workspace_id = "41c57d1f-edc2-4050-938d-c3ce185472f9" } ``` diff --git a/docs/resources/source_sendinblue.md b/docs/resources/source_sendinblue.md index f02b3207f..0d69e89da 100644 --- a/docs/resources/source_sendinblue.md +++ b/docs/resources/source_sendinblue.md @@ -18,9 +18,9 @@ resource "airbyte_source_sendinblue" "my_source_sendinblue" { api_key = "...my_api_key..." source_type = "sendinblue" } - name = "Miss Loretta Howell PhD" + name = "Terence Kassulke III" secret_id = "...my_secret_id..." - workspace_id = "9136a7e8-d532-413f-bf65-8752db764c59" + workspace_id = "6a8be344-4eac-48b3-a287-5c6c1fe606d0" } ``` diff --git a/docs/resources/source_senseforce.md b/docs/resources/source_senseforce.md index a95126d37..17f4207e8 100644 --- a/docs/resources/source_senseforce.md +++ b/docs/resources/source_senseforce.md @@ -18,13 +18,13 @@ resource "airbyte_source_senseforce" "my_source_senseforce" { access_token = "...my_access_token..." backend_url = "https://galaxyapi.senseforce.io" dataset_id = "8f418098-ca28-4df5-9498-0df9fe78eda7" - slice_range = 360 + slice_range = 10 source_type = "senseforce" start_date = "2017-01-25" } - name = "Nichole Treutel" + name = "Rodolfo Langworth" secret_id = "...my_secret_id..." - workspace_id = "ada29ca7-9181-4c95-a716-63c530b56651" + workspace_id = "e50c1666-1a1d-4913-aa7e-8d53213f3f65" } ``` diff --git a/docs/resources/source_sentry.md b/docs/resources/source_sentry.md index bdb9cbfd0..dfd8b7e3c 100644 --- a/docs/resources/source_sentry.md +++ b/docs/resources/source_sentry.md @@ -19,14 +19,14 @@ resource "airbyte_source_sentry" "my_source_sentry" { discover_fields = [ "{ \"see\": \"documentation\" }", ] - hostname = "imperfect-creator.name" + hostname = "muted-ingredient.biz" organization = "...my_organization..." project = "...my_project..." source_type = "sentry" } - name = "Laurie Farrell" + name = "Krystal Quitzon" secret_id = "...my_secret_id..." - workspace_id = "12ab2521-b9f2-4e07-a467-b8a40bc05fab" + workspace_id = "4c59f0a5-6ceb-4cad-a29c-a79181c95671" } ``` diff --git a/docs/resources/source_sftp.md b/docs/resources/source_sftp.md index 0a940524d..c922caf87 100644 --- a/docs/resources/source_sftp.md +++ b/docs/resources/source_sftp.md @@ -24,14 +24,14 @@ resource "airbyte_source_sftp" "my_source_sftp" { file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" file_types = "csv,json" folder_path = "/logs/2022" - host = "192.0.2.1" + host = "www.host.com" port = 22 source_type = "sftp" user = "...my_user..." } - name = "Dorothy Waters" + name = "Miss Tommy Emard" secret_id = "...my_secret_id..." - workspace_id = "22a94d20-ec90-4ea4-9d1f-465e85156fff" + workspace_id = "665163a3-6385-412a-b252-1b9f2e072467" } ``` diff --git a/docs/resources/source_sftp_bulk.md b/docs/resources/source_sftp_bulk.md index 986b8370e..0fc7a449a 100644 --- a/docs/resources/source_sftp_bulk.md +++ b/docs/resources/source_sftp_bulk.md @@ -15,9 +15,9 @@ SourceSftpBulk Resource ```terraform resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" { configuration = { - file_most_recent = true + file_most_recent = false file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" - file_type = "csv" + file_type = "json" folder_path = "/logs/2022" host = "192.0.2.1" password = "...my_password..." 
@@ -27,11 +27,11 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" { source_type = "sftp-bulk" start_date = "2017-01-25T00:00:00Z" stream_name = "ftp_contacts" - username = "Donny_Wuckert34" + username = "Pearline_Bailey" } - name = "Doug Marvin" + name = "Wm Bartoletti" secret_id = "...my_secret_id..." - workspace_id = "3398dafb-42a8-4d63-b88e-4d8039ea5f9b" + workspace_id = "50edf22a-94d2-40ec-90ea-41d1f465e851" } ``` diff --git a/docs/resources/source_shopify.md b/docs/resources/source_shopify.md index 2cf3edce3..3fe62e4f9 100644 --- a/docs/resources/source_shopify.md +++ b/docs/resources/source_shopify.md @@ -23,11 +23,11 @@ resource "airbyte_source_shopify" "my_source_shopify" { } shop = "my-store" source_type = "shopify" - start_date = "2021-01-01" + start_date = "2022-01-02" } - name = "Rhonda Gislason" + name = "Randal Kris" secret_id = "...my_secret_id..." - workspace_id = "619039da-cd38-4ed0-9c67-1dc7f1e3af15" + workspace_id = "df54fdd5-ea95-4433-98da-fb42a8d63388" } ``` @@ -56,11 +56,11 @@ Required: - `shop` (String) The name of your Shopify store found in the URL. For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'. - `source_type` (String) must be one of ["shopify"] -- `start_date` (String) The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. Optional: - `credentials` (Attributes) The authorization method to use to retrieve data from Shopify (see [below for nested schema](#nestedatt--configuration--credentials)) +- `start_date` (String) The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. ### Nested Schema for `configuration.credentials` diff --git a/docs/resources/source_shortio.md b/docs/resources/source_shortio.md index 34797bfd8..7ece64e90 100644 --- a/docs/resources/source_shortio.md +++ b/docs/resources/source_shortio.md @@ -20,9 +20,9 @@ resource "airbyte_source_shortio" "my_source_shortio" { source_type = "shortio" start_date = "2023-07-30T03:43:59.244Z" } - name = "Carlos Armstrong" + name = "Troy Streich I" secret_id = "...my_secret_id..." - workspace_id = "0d1b4901-f2bd-489c-8a32-639da5b7b690" + workspace_id = "9ea5f9b1-8a24-44fd-a190-39dacd38ed0d" } ``` diff --git a/docs/resources/source_slack.md b/docs/resources/source_slack.md index 40383600e..3b9897835 100644 --- a/docs/resources/source_slack.md +++ b/docs/resources/source_slack.md @@ -24,14 +24,14 @@ resource "airbyte_source_slack" "my_source_slack" { option_title = "API Token Credentials" } } - join_channels = true - lookback_window = 14 + join_channels = false + lookback_window = 7 source_type = "slack" start_date = "2017-01-25T00:00:00Z" } - name = "Eduardo Gottlieb" + name = "Dr. Jamie Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "43664a8f-0af8-4c69-9d73-2d9fbaf9476a" + workspace_id = "af15920c-90d1-4b49-81f2-bd89c8a32639" } ``` diff --git a/docs/resources/source_smaily.md b/docs/resources/source_smaily.md index 21d8df9da..9094ce421 100644 --- a/docs/resources/source_smaily.md +++ b/docs/resources/source_smaily.md @@ -20,9 +20,9 @@ resource "airbyte_source_smaily" "my_source_smaily" { api_username = "...my_api_username..." source_type = "smaily" } - name = "Lynda Thiel" + name = "Donnie Hauck" secret_id = "...my_secret_id..." 
- workspace_id = "cc50c8a3-512c-4737-8489-30750a00e966" + workspace_id = "b6902b88-1a94-4f64-b664-a8f0af8c691d" } ``` diff --git a/docs/resources/source_smartengage.md b/docs/resources/source_smartengage.md index 58a7a9760..c654b4525 100644 --- a/docs/resources/source_smartengage.md +++ b/docs/resources/source_smartengage.md @@ -18,9 +18,9 @@ resource "airbyte_source_smartengage" "my_source_smartengage" { api_key = "...my_api_key..." source_type = "smartengage" } - name = "Guillermo Kunze" + name = "Carmen Crist" secret_id = "...my_secret_id..." - workspace_id = "d4319439-8c78-43c9-a398-ed3d3ab7ca3c" + workspace_id = "fbaf9476-a2ae-48dc-850c-8a3512c73784" } ``` diff --git a/docs/resources/source_smartsheets.md b/docs/resources/source_smartsheets.md index 63ccff4f3..ecdad7c4a 100644 --- a/docs/resources/source_smartsheets.md +++ b/docs/resources/source_smartsheets.md @@ -22,15 +22,15 @@ resource "airbyte_source_smartsheets" "my_source_smartsheets" { } } metadata_fields = [ - "sheetversion", + "row_access_level", ] source_type = "smartsheets" spreadsheet_id = "...my_spreadsheet_id..." start_datetime = "2000-01-01T13:00:00-07:00" } - name = "Michele Muller" + name = "Joann Bechtelar Jr." secret_id = "...my_secret_id..." - workspace_id = "0cfd5d69-89b7-4206-8510-77d19ea83d49" + workspace_id = "e966ec73-6d43-4194-b98c-783c92398ed3" } ``` diff --git a/docs/resources/source_snapchat_marketing.md b/docs/resources/source_snapchat_marketing.md index 7cb90233f..058f57af5 100644 --- a/docs/resources/source_snapchat_marketing.md +++ b/docs/resources/source_snapchat_marketing.md @@ -22,9 +22,9 @@ resource "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" { source_type = "snapchat-marketing" start_date = "2022-01-01" } - name = "Hannah MacGyver" + name = "Chelsea Ortiz" secret_id = "...my_secret_id..." - workspace_id = "c1954545-e955-4dcc-985e-a4901c7c43ad" + workspace_id = "5ca8649a-70cf-4d5d-a989-b7206451077d" } ``` diff --git a/docs/resources/source_snowflake.md b/docs/resources/source_snowflake.md index 1fc157898..e2189edc1 100644 --- a/docs/resources/source_snowflake.md +++ b/docs/resources/source_snowflake.md @@ -32,9 +32,9 @@ resource "airbyte_source_snowflake" "my_source_snowflake" { source_type = "snowflake" warehouse = "AIRBYTE_WAREHOUSE" } - name = "Betsy Osinski" + name = "Katrina Tillman" secret_id = "...my_secret_id..." - workspace_id = "84aba3d2-30ed-4f73-811a-115382bd7ed5" + workspace_id = "3d492ed1-4b8a-42c1-9545-45e955dcc185" } ``` diff --git a/docs/resources/source_sonar_cloud.md b/docs/resources/source_sonar_cloud.md index a2b156cd9..4ef4699fa 100644 --- a/docs/resources/source_sonar_cloud.md +++ b/docs/resources/source_sonar_cloud.md @@ -24,9 +24,9 @@ resource "airbyte_source_sonar_cloud" "my_source_sonarcloud" { start_date = "YYYY-MM-DD" user_token = "...my_user_token..." } - name = "Judy Bogan" + name = "Mildred Rosenbaum" secret_id = "...my_secret_id..." - workspace_id = "8f4d7396-564c-420a-8711-a961d24a7dbb" + workspace_id = "43ad2daa-784a-4ba3-9230-edf73811a115" } ``` diff --git a/docs/resources/source_spacex_api.md b/docs/resources/source_spacex_api.md index 50b679538..4fa19f065 100644 --- a/docs/resources/source_spacex_api.md +++ b/docs/resources/source_spacex_api.md @@ -15,13 +15,13 @@ SourceSpacexAPI Resource ```terraform resource "airbyte_source_spacex_api" "my_source_spacexapi" { configuration = { - id = "8f532d89-2cf7-4812-8b51-2c878240bf54" + id = "382bd7ed-5650-4762-9c58-f4d7396564c2" options = "...my_options..." 
source_type = "spacex-api" } - name = "Randal Lockman" + name = "Lee Batz Jr." secret_id = "...my_secret_id..." - workspace_id = "8f1bf0bc-8e1f-4206-95d8-31d0081090f6" + workspace_id = "a961d24a-7dbb-48f5-b2d8-92cf7812cb51" } ``` diff --git a/docs/resources/source_square.md b/docs/resources/source_square.md index 3e6b03d01..ff8865d50 100644 --- a/docs/resources/source_square.md +++ b/docs/resources/source_square.md @@ -21,14 +21,14 @@ resource "airbyte_source_square" "my_source_square" { auth_type = "API Key" } } - include_deleted_objects = false + include_deleted_objects = true is_sandbox = false source_type = "square" - start_date = "2022-08-13" + start_date = "2022-02-01" } - name = "Josephine Yundt" + name = "Miss Bruce Gibson" secret_id = "...my_secret_id..." - workspace_id = "681c5768-dce7-4424-89a2-15e08601489a" + workspace_id = "548f88f8-f1bf-40bc-8e1f-206d5d831d00" } ``` diff --git a/docs/resources/source_strava.md b/docs/resources/source_strava.md index b3efc76d2..0c6532b7e 100644 --- a/docs/resources/source_strava.md +++ b/docs/resources/source_strava.md @@ -23,9 +23,9 @@ resource "airbyte_source_strava" "my_source_strava" { source_type = "strava" start_date = "2021-03-01T00:00:00Z" } - name = "Henrietta Durgan" + name = "Jeffrey Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "3dd9dda3-3dcd-4634-83e4-a7a98e4df37e" + workspace_id = "06673f3a-681c-4576-8dce-742409a215e0" } ``` diff --git a/docs/resources/source_stripe.md b/docs/resources/source_stripe.md index 79b842e3b..78a2b3f52 100644 --- a/docs/resources/source_stripe.md +++ b/docs/resources/source_stripe.md @@ -17,14 +17,14 @@ resource "airbyte_source_stripe" "my_source_stripe" { configuration = { account_id = "...my_account_id..." client_secret = "...my_client_secret..." - lookback_window_days = 3 + lookback_window_days = 5 slice_range = 10 source_type = "stripe" start_date = "2017-01-25T00:00:00Z" } - name = "Bernice Schultz I" + name = "Seth Nitzsche" secret_id = "...my_secret_id..." - workspace_id = "e13a4823-1090-47bd-b54c-092bd734f024" + workspace_id = "63e3af3d-d9dd-4a33-9cd6-3483e4a7a98e" } ``` @@ -54,11 +54,11 @@ Required: - `account_id` (String) Your Stripe account ID (starts with 'acct_', find yours here). - `client_secret` (String) Stripe API key (usually starts with 'sk_live_'; find yours here). - `source_type` (String) must be one of ["stripe"] -- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. Optional: -- `lookback_window_days` (Number) When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here +- `lookback_window_days` (Number) When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here - `slice_range` (Number) The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted. +- `start_date` (String) UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. 
diff --git a/docs/resources/source_survey_sparrow.md b/docs/resources/source_survey_sparrow.md index cd5355b0e..4b759c695 100644 --- a/docs/resources/source_survey_sparrow.md +++ b/docs/resources/source_survey_sparrow.md @@ -26,9 +26,9 @@ resource "airbyte_source_survey_sparrow" "my_source_surveysparrow" { "{ \"see\": \"documentation\" }", ] } - name = "Merle Keebler Jr." + name = "Hugo Kovacek" secret_id = "...my_secret_id..." - workspace_id = "e9db3ad4-c6b0-4310-8d9c-337473082b94" + workspace_id = "f02449d8-6f4b-4b20-be5d-911cbfe749ca" } ``` diff --git a/docs/resources/source_surveymonkey.md b/docs/resources/source_surveymonkey.md index 44d215e52..3d3fd7a8b 100644 --- a/docs/resources/source_surveymonkey.md +++ b/docs/resources/source_surveymonkey.md @@ -28,9 +28,9 @@ resource "airbyte_source_surveymonkey" "my_source_surveymonkey" { "...", ] } - name = "Cecil Wintheiser" + name = "Pearl Trantow" secret_id = "...my_secret_id..." - workspace_id = "b20fe5d9-11cb-4fe7-89ca-f45a27f69e2c" + workspace_id = "b8955d41-3e13-4a48-a310-907bd354c092" } ``` diff --git a/docs/resources/source_tempo.md b/docs/resources/source_tempo.md index da25973a9..cf95bc67f 100644 --- a/docs/resources/source_tempo.md +++ b/docs/resources/source_tempo.md @@ -18,9 +18,9 @@ resource "airbyte_source_tempo" "my_source_tempo" { api_token = "...my_api_token..." source_type = "tempo" } - name = "Alan O'Conner DVM" + name = "Edwin Haley" secret_id = "...my_secret_id..." - workspace_id = "d5671e9c-3263-450a-8671-43789ce0e991" + workspace_id = "7f69e2c9-e6d1-40e9-9b3a-d4c6b03108d9" } ``` diff --git a/docs/resources/source_the_guardian_api.md b/docs/resources/source_the_guardian_api.md index d3d6f090c..15c04bafb 100644 --- a/docs/resources/source_the_guardian_api.md +++ b/docs/resources/source_the_guardian_api.md @@ -17,15 +17,15 @@ resource "airbyte_source_the_guardian_api" "my_source_theguardianapi" { configuration = { api_key = "...my_api_key..." end_date = "YYYY-MM-DD" - query = "environment AND political" - section = "technology" + query = "political" + section = "media" source_type = "the-guardian-api" start_date = "YYYY-MM-DD" tag = "environment/recycling" } - name = "Arturo Fay" + name = "Pauline Kozey IV" secret_id = "...my_secret_id..." - workspace_id = "4c0252fe-3b4b-44db-8b77-8ebb6e1d2cf5" + workspace_id = "2b94f2ab-1fd5-4671-a9c3-26350a467143" } ``` diff --git a/docs/resources/source_tiktok_marketing.md b/docs/resources/source_tiktok_marketing.md index be85a63cb..3929518fe 100644 --- a/docs/resources/source_tiktok_marketing.md +++ b/docs/resources/source_tiktok_marketing.md @@ -15,7 +15,7 @@ SourceTiktokMarketing Resource ```terraform resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" { configuration = { - attribution_window = 0 + attribution_window = 5 credentials = { source_tiktok_marketing_authentication_method_o_auth2_0 = { access_token = "...my_access_token..." @@ -25,14 +25,14 @@ resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" { secret = "...my_secret..." } } - end_date = "2022-04-16" - include_deleted = true + end_date = "2021-10-08" + include_deleted = false source_type = "tiktok-marketing" - start_date = "2020-11-25" + start_date = "2022-12-21" } - name = "Vicky Reichert" + name = "Mrs. Joey Mueller" secret_id = "...my_secret_id..." 
- workspace_id = "635d5e65-da02-48c3-a951-a1e30fda9664" + workspace_id = "4d93a74c-0252-4fe3-b4b4-db8b778ebb6e" } ``` diff --git a/docs/resources/source_todoist.md b/docs/resources/source_todoist.md index 585f2a30c..a9ec4940e 100644 --- a/docs/resources/source_todoist.md +++ b/docs/resources/source_todoist.md @@ -18,9 +18,9 @@ resource "airbyte_source_todoist" "my_source_todoist" { source_type = "todoist" token = "...my_token..." } - name = "Tracy Senger" + name = "Hope Collins" secret_id = "...my_secret_id..." - workspace_id = "78673e13-a12a-46b9-9249-4594487f5c84" + workspace_id = "502bafb2-cbc4-4635-95e6-5da028c3e951" } ``` diff --git a/docs/resources/source_trello.md b/docs/resources/source_trello.md index ad569fe1e..73e81dbc3 100644 --- a/docs/resources/source_trello.md +++ b/docs/resources/source_trello.md @@ -23,9 +23,9 @@ resource "airbyte_source_trello" "my_source_trello" { start_date = "2021-03-01T00:00:00Z" token = "...my_token..." } - name = "Rosemary Rice" + name = "Philip Armstrong" secret_id = "...my_secret_id..." - workspace_id = "b3cdf641-5b04-449f-9df1-3f4eedbe78bf" + workspace_id = "a966489d-7b78-4673-a13a-12a6b9924945" } ``` diff --git a/docs/resources/source_trustpilot.md b/docs/resources/source_trustpilot.md index 4f060817f..6187d6e0e 100644 --- a/docs/resources/source_trustpilot.md +++ b/docs/resources/source_trustpilot.md @@ -27,9 +27,9 @@ resource "airbyte_source_trustpilot" "my_source_trustpilot" { source_type = "trustpilot" start_date = "%Y-%m-%dT%H:%M:%S" } - name = "Betty Jast" + name = "Bradley Goodwin" secret_id = "...my_secret_id..." - workspace_id = "5894ea76-3d5c-4727-95b7-85148d6d549e" + workspace_id = "f5c84383-6b86-4b3c-9f64-15b0449f9df1" } ``` diff --git a/docs/resources/source_tvmaze_schedule.md b/docs/resources/source_tvmaze_schedule.md index 50f3739c4..8c7e0937d 100644 --- a/docs/resources/source_tvmaze_schedule.md +++ b/docs/resources/source_tvmaze_schedule.md @@ -19,11 +19,11 @@ resource "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" { end_date = "...my_end_date..." source_type = "tvmaze-schedule" start_date = "...my_start_date..." - web_schedule_country_code = "GB" + web_schedule_country_code = "global" } - name = "Audrey Rippin" + name = "Gretchen Waters" secret_id = "...my_secret_id..." - workspace_id = "bc0f970c-42fc-49f4-8442-25e75b796065" + workspace_id = "e78bf606-8258-494e-a763-d5c72795b785" } ``` diff --git a/docs/resources/source_twilio.md b/docs/resources/source_twilio.md index 29932481d..e4b40a3ad 100644 --- a/docs/resources/source_twilio.md +++ b/docs/resources/source_twilio.md @@ -21,9 +21,9 @@ resource "airbyte_source_twilio" "my_source_twilio" { source_type = "twilio" start_date = "2020-10-01T00:00:00Z" } - name = "Oliver Kautzer" + name = "Andre Sporer" secret_id = "...my_secret_id..." - workspace_id = "3b90a1b8-c95b-4e12-94b7-39f4fe77210d" + workspace_id = "9e5635b3-3bc0-4f97-8c42-fc9f4844225e" } ``` diff --git a/docs/resources/source_twilio_taskrouter.md b/docs/resources/source_twilio_taskrouter.md index e160066ac..a0dddb445 100644 --- a/docs/resources/source_twilio_taskrouter.md +++ b/docs/resources/source_twilio_taskrouter.md @@ -19,9 +19,9 @@ resource "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" { auth_token = "...my_auth_token..." source_type = "twilio-taskrouter" } - name = "Marta Hodkiewicz" + name = "Cathy Ratke" secret_id = "...my_secret_id..." 
- workspace_id = "8c99c722-d2bc-40f9-8087-d9caae042dd7" + workspace_id = "6065c0ef-a6f9-43b9-8a1b-8c95be1254b7" } ``` diff --git a/docs/resources/source_twitter.md b/docs/resources/source_twitter.md index 7c8f71fca..00876aa99 100644 --- a/docs/resources/source_twitter.md +++ b/docs/resources/source_twitter.md @@ -16,14 +16,14 @@ SourceTwitter Resource resource "airbyte_source_twitter" "my_source_twitter" { configuration = { api_key = "...my_api_key..." - end_date = "2020-12-31T16:49:13.658Z" + end_date = "2022-05-29T22:05:47.839Z" query = "...my_query..." source_type = "twitter" - start_date = "2021-05-21T04:49:52.479Z" + start_date = "2022-02-11T15:55:53.597Z" } - name = "Marco Gleichner" + name = "Elbert Kuhic" secret_id = "...my_secret_id..." - workspace_id = "a1cfe9e1-5df9-4039-87f3-7831983d42e5" + workspace_id = "10d1f655-8c99-4c72-ad2b-c0f94087d9ca" } ``` diff --git a/docs/resources/source_typeform.md b/docs/resources/source_typeform.md index 8985c9c8b..0f4c59a15 100644 --- a/docs/resources/source_typeform.md +++ b/docs/resources/source_typeform.md @@ -22,7 +22,7 @@ resource "airbyte_source_typeform" "my_source_typeform" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2022-05-15T15:21:36.066Z" + token_expiry_date = "2021-02-23T09:05:08.511Z" } } form_ids = [ @@ -31,9 +31,9 @@ resource "airbyte_source_typeform" "my_source_typeform" { source_type = "typeform" start_date = "2021-03-01T00:00:00Z" } - name = "Lucy Johnson" + name = "Rosemarie Spencer" secret_id = "...my_secret_id..." - workspace_id = "7c50233c-1471-4d51-aaa6-ddf5abd6487c" + workspace_id = "aac9b4ca-a1cf-4e9e-95df-903907f37831" } ``` diff --git a/docs/resources/source_us_census.md b/docs/resources/source_us_census.md index c39a7a66b..3a3f1131b 100644 --- a/docs/resources/source_us_census.md +++ b/docs/resources/source_us_census.md @@ -16,13 +16,13 @@ SourceUsCensus Resource resource "airbyte_source_us_census" "my_source_uscensus" { configuration = { api_key = "...my_api_key..." - query_params = "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" - query_path = "data/timeseries/healthins/sahie" + query_params = "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path = "data/2018/acs" source_type = "us-census" } - name = "Brandon Rogahn" + name = "Ginger Gislason" secret_id = "...my_secret_id..." - workspace_id = "2a00bef6-9e10-4015-b630-bda7afded84a" + workspace_id = "54a85466-597c-4502-b3c1-471d51aaa6dd" } ``` diff --git a/docs/resources/source_vantage.md b/docs/resources/source_vantage.md index c83542fcd..fcbfd4b8a 100644 --- a/docs/resources/source_vantage.md +++ b/docs/resources/source_vantage.md @@ -18,9 +18,9 @@ resource "airbyte_source_vantage" "my_source_vantage" { access_token = "...my_access_token..." source_type = "vantage" } - name = "Veronica Pagac Sr." + name = "Corey Pacocha" secret_id = "...my_secret_id..." 
- workspace_id = "38e1a735-ac26-4ae3-bbef-971a8f46bca1" + workspace_id = "6487c5fc-2b86-42a0-8bef-69e100157630" } ``` diff --git a/docs/resources/source_webflow.md b/docs/resources/source_webflow.md index cdc1186e7..929ea88bc 100644 --- a/docs/resources/source_webflow.md +++ b/docs/resources/source_webflow.md @@ -19,9 +19,9 @@ resource "airbyte_source_webflow" "my_source_webflow" { site_id = "a relatively long hex sequence" source_type = "webflow" } - name = "Jennifer Johnson" + name = "Taylor Paucek" secret_id = "...my_secret_id..." - workspace_id = "965b711d-08cf-488e-89f7-b99a550a656e" + workspace_id = "fded84a3-5a41-4238-a1a7-35ac26ae33be" } ``` diff --git a/docs/resources/source_whisky_hunter.md b/docs/resources/source_whisky_hunter.md index 76fef1179..f78b5ecad 100644 --- a/docs/resources/source_whisky_hunter.md +++ b/docs/resources/source_whisky_hunter.md @@ -17,9 +17,9 @@ resource "airbyte_source_whisky_hunter" "my_source_whiskyhunter" { configuration = { source_type = "whisky-hunter" } - name = "Curtis Dickens" + name = "Miss Terrence Kulas" secret_id = "...my_secret_id..." - workspace_id = "b0ce8aa6-5432-4a98-aeb7-e14ca5640891" + workspace_id = "f46bca11-06fe-4965-b711-d08cf88ec9f7" } ``` diff --git a/docs/resources/source_wikipedia_pageviews.md b/docs/resources/source_wikipedia_pageviews.md index 6b686e927..fb8f9d007 100644 --- a/docs/resources/source_wikipedia_pageviews.md +++ b/docs/resources/source_wikipedia_pageviews.md @@ -15,18 +15,18 @@ SourceWikipediaPageviews Resource ```terraform resource "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" { configuration = { - access = "desktop" - agent = "all-agents" + access = "mobile-app" + agent = "spider" article = "Are_You_the_One%3F" - country = "FR" + country = "IN" end = "...my_end..." project = "www.mediawiki.org" source_type = "wikipedia-pageviews" start = "...my_start..." } - name = "Catherine Mitchell" + name = "Laura Murray" secret_id = "...my_secret_id..." - workspace_id = "8f88ece7-bf90-44e0-9105-d38908162c6b" + workspace_id = "6ed333bb-0ce8-4aa6-9432-a986eb7e14ca" } ``` diff --git a/docs/resources/source_woocommerce.md b/docs/resources/source_woocommerce.md index 24408ddf5..6affe5809 100644 --- a/docs/resources/source_woocommerce.md +++ b/docs/resources/source_woocommerce.md @@ -21,9 +21,9 @@ resource "airbyte_source_woocommerce" "my_source_woocommerce" { source_type = "woocommerce" start_date = "2021-01-01" } - name = "Dr. Mattie Nader" + name = "Laura Lindgren III" secret_id = "...my_secret_id..." - workspace_id = "57b7d03a-1480-4f8d-a30f-069d810618d9" + workspace_id = "0097019a-48f8-48ec-a7bf-904e01105d38" } ``` diff --git a/docs/resources/source_xero.md b/docs/resources/source_xero.md index 5ba4de68a..4026095cf 100644 --- a/docs/resources/source_xero.md +++ b/docs/resources/source_xero.md @@ -26,9 +26,9 @@ resource "airbyte_source_xero" "my_source_xero" { start_date = "2022-03-01T00:00:00Z" tenant_id = "...my_tenant_id..." } - name = "Geraldine Crist" + name = "Roger Hudson" secret_id = "...my_secret_id..." - workspace_id = "7510da80-3122-492c-861c-2a702bb97ee1" + workspace_id = "6beb68a0-f657-4b7d-83a1-480f8de30f06" } ``` diff --git a/docs/resources/source_xkcd.md b/docs/resources/source_xkcd.md index 4b3b86f26..521fbe492 100644 --- a/docs/resources/source_xkcd.md +++ b/docs/resources/source_xkcd.md @@ -17,9 +17,9 @@ resource "airbyte_source_xkcd" "my_source_xkcd" { configuration = { source_type = "xkcd" } - name = "Bonnie Steuber" + name = "Mr. Laurence Littel" secret_id = "...my_secret_id..." 
- workspace_id = "de35f8e0-1bf3-43ea-ab45-402ac1704bf1" + workspace_id = "18d97e15-2297-4510-9a80-312292cc61c2" } ``` diff --git a/docs/resources/source_yandex_metrica.md b/docs/resources/source_yandex_metrica.md index 8a2c843ee..c51ceb07f 100644 --- a/docs/resources/source_yandex_metrica.md +++ b/docs/resources/source_yandex_metrica.md @@ -21,9 +21,9 @@ resource "airbyte_source_yandex_metrica" "my_source_yandexmetrica" { source_type = "yandex-metrica" start_date = "2022-01-01" } - name = "Reginald Carter" + name = "Dominic Marvin" secret_id = "...my_secret_id..." - workspace_id = "e5eb5f0c-492b-4574-8d08-a2267aaee79e" + workspace_id = "e102da2d-e35f-48e0-9bf3-3eaab45402ac" } ``` diff --git a/docs/resources/source_yotpo.md b/docs/resources/source_yotpo.md index 9d8aed38c..0b4f4c155 100644 --- a/docs/resources/source_yotpo.md +++ b/docs/resources/source_yotpo.md @@ -17,13 +17,13 @@ resource "airbyte_source_yotpo" "my_source_yotpo" { configuration = { access_token = "...my_access_token..." app_key = "...my_app_key..." - email = "Petra66@gmail.com" + email = "Ibrahim74@gmail.com" source_type = "yotpo" start_date = "2022-03-01T00:00:00.000Z" } - name = "Hannah Volkman" + name = "Clark McGlynn" secret_id = "...my_secret_id..." - workspace_id = "83d2378a-e3bf-4c23-9945-0a986a495bac" + workspace_id = "61aae5eb-5f0c-4492-b574-4d08a2267aae" } ``` diff --git a/docs/resources/source_younium.md b/docs/resources/source_younium.md index 646b20961..187ae007c 100644 --- a/docs/resources/source_younium.md +++ b/docs/resources/source_younium.md @@ -17,13 +17,13 @@ resource "airbyte_source_younium" "my_source_younium" { configuration = { legal_entity = "...my_legal_entity..." password = "...my_password..." - playground = false + playground = true source_type = "younium" - username = "Angeline.Kunze40" + username = "Jairo.Monahan79" } - name = "Ernest Larkin" + name = "Martha Orn" secret_id = "...my_secret_id..." - workspace_id = "c8649238-6f62-4c96-9c4c-c6b78890a3fd" + workspace_id = "1becb83d-2378-4ae3-bfc2-3d9450a986a4" } ``` diff --git a/docs/resources/source_youtube_analytics.md b/docs/resources/source_youtube_analytics.md index e4825505e..c3833c756 100644 --- a/docs/resources/source_youtube_analytics.md +++ b/docs/resources/source_youtube_analytics.md @@ -22,9 +22,9 @@ resource "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" { } source_type = "youtube-analytics" } - name = "Dr. Kara Lowe" + name = "Tommy Rippin" secret_id = "...my_secret_id..." - workspace_id = "10f8c23d-f931-4da3-adb5-1fad94acc943" + workspace_id = "707f06b2-8ecc-4864-9238-6f62c969c4cc" } ``` diff --git a/docs/resources/source_zendesk_chat.md b/docs/resources/source_zendesk_chat.md index 7891de10b..45c2cc0b7 100644 --- a/docs/resources/source_zendesk_chat.md +++ b/docs/resources/source_zendesk_chat.md @@ -25,9 +25,9 @@ resource "airbyte_source_zendesk_chat" "my_source_zendeskchat" { start_date = "2021-02-01T00:00:00Z" subdomain = "...my_subdomain..." } - name = "Melinda Koch" + name = "Mabel Lebsack MD" secret_id = "...my_secret_id..." 
- workspace_id = "d15321b8-32a5-46d6-9180-ff60eb9a6658" + workspace_id = "3fd3c81d-a10f-48c2-bdf9-31da3edb51fa" } ``` diff --git a/docs/resources/source_zendesk_sunshine.md b/docs/resources/source_zendesk_sunshine.md index 88e3bf37c..625122394 100644 --- a/docs/resources/source_zendesk_sunshine.md +++ b/docs/resources/source_zendesk_sunshine.md @@ -19,16 +19,16 @@ resource "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" { source_zendesk_sunshine_authorization_method_api_token = { api_token = "...my_api_token..." auth_method = "api_token" - email = "Hollis.Mann72@hotmail.com" + email = "Leonor_Funk@hotmail.com" } } source_type = "zendesk-sunshine" start_date = "2021-01-01T00:00:00Z" subdomain = "...my_subdomain..." } - name = "Alexander Friesen" + name = "Mrs. Edith Hermiston" secret_id = "...my_secret_id..." - workspace_id = "82dbec75-c68c-4606-9946-8ce304d8849b" + workspace_id = "726d1532-1b83-42a5-ad69-180ff60eb9a6" } ``` @@ -82,10 +82,6 @@ Required: - `auth_method` (String) must be one of ["api_token"] - `email` (String) The user email for your Zendesk account -Optional: - -- `additional_properties` (String) Parsed as JSON. - ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_authorization_method_o_auth2_0` @@ -97,10 +93,6 @@ Required: - `client_id` (String) The Client ID of your OAuth application. - `client_secret` (String) The Client Secret of your OAuth application. -Optional: - -- `additional_properties` (String) Parsed as JSON. - ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_api_token` @@ -111,10 +103,6 @@ Required: - `auth_method` (String) must be one of ["api_token"] - `email` (String) The user email for your Zendesk account -Optional: - -- `additional_properties` (String) Parsed as JSON. - ### Nested Schema for `configuration.credentials.source_zendesk_sunshine_update_authorization_method_o_auth2_0` @@ -126,8 +114,4 @@ Required: - `client_id` (String) The Client ID of your OAuth application. - `client_secret` (String) The Client Secret of your OAuth application. -Optional: - -- `additional_properties` (String) Parsed as JSON. - diff --git a/docs/resources/source_zendesk_support.md b/docs/resources/source_zendesk_support.md index 7313a7295..88d61152d 100644 --- a/docs/resources/source_zendesk_support.md +++ b/docs/resources/source_zendesk_support.md @@ -19,17 +19,17 @@ resource "airbyte_source_zendesk_support" "my_source_zendesksupport" { source_zendesk_support_authentication_api_token = { api_token = "...my_api_token..." credentials = "api_token" - email = "Kacie27@hotmail.com" + email = "Ezequiel.Lindgren56@yahoo.com" } } - ignore_pagination = false + ignore_pagination = true source_type = "zendesk-support" start_date = "2020-10-15T00:00:00Z" subdomain = "...my_subdomain..." } - name = "May McClure" + name = "Alexander Friesen" secret_id = "...my_secret_id..." - workspace_id = "b0c69e37-2db1-4344-ba9f-78a5c0ed7aab" + workspace_id = "82dbec75-c68c-4606-9946-8ce304d8849b" } ``` @@ -57,13 +57,13 @@ resource "airbyte_source_zendesk_support" "my_source_zendesksupport" { Required: - `source_type` (String) must be one of ["zendesk-support"] -- `start_date` (String) The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. - `subdomain` (String) This is your unique Zendesk subdomain that can be found in your account URL. 
For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain. Optional: - `credentials` (Attributes) Zendesk allows two authentication methods. We recommend using `OAuth2.0` for Airbyte Cloud users and `API token` for Airbyte Open Source users. (see [below for nested schema](#nestedatt--configuration--credentials)) - `ignore_pagination` (Boolean) Makes each stream read a single page of data. +- `start_date` (String) The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. ### Nested Schema for `configuration.credentials` diff --git a/docs/resources/source_zendesk_talk.md b/docs/resources/source_zendesk_talk.md index 19a3faa19..e75284df5 100644 --- a/docs/resources/source_zendesk_talk.md +++ b/docs/resources/source_zendesk_talk.md @@ -19,16 +19,16 @@ resource "airbyte_source_zendesk_talk" "my_source_zendesktalk" { source_zendesk_talk_authentication_api_token = { api_token = "...my_api_token..." auth_type = "api_token" - email = "Brant.Walter@yahoo.com" + email = "Kacie27@hotmail.com" } } source_type = "zendesk-talk" start_date = "2020-10-15T00:00:00Z" subdomain = "...my_subdomain..." } - name = "Dr. Lana Ritchie" + name = "Jackie Welch" secret_id = "...my_secret_id..." - workspace_id = "8d27b519-96b5-4b4b-90ee-f712b7a7ab03" + workspace_id = "bb0c69e3-72db-4134-8ba9-f78a5c0ed7aa" } ``` diff --git a/docs/resources/source_zenloop.md b/docs/resources/source_zenloop.md index 6b68b2972..46ba116d7 100644 --- a/docs/resources/source_zenloop.md +++ b/docs/resources/source_zenloop.md @@ -21,9 +21,9 @@ resource "airbyte_source_zenloop" "my_source_zenloop" { survey_group_id = "...my_survey_group_id..." survey_id = "...my_survey_id..." } - name = "Ms. Emma Rodriguez Jr." + name = "Ricardo Champlin" secret_id = "...my_secret_id..." - workspace_id = "688deebe-f897-4f3d-90cc-d33f11b3e4e0" + workspace_id = "7261fb0c-58d2-47b5-9996-b5b4b50eef71" } ``` diff --git a/docs/resources/source_zoho_crm.md b/docs/resources/source_zoho_crm.md index 610019703..3bc56397a 100644 --- a/docs/resources/source_zoho_crm.md +++ b/docs/resources/source_zoho_crm.md @@ -17,16 +17,16 @@ resource "airbyte_source_zoho_crm" "my_source_zohocrm" { configuration = { client_id = "...my_client_id..." client_secret = "...my_client_secret..." - dc_region = "IN" - edition = "Free" + dc_region = "US" + edition = "Enterprise" environment = "Developer" refresh_token = "...my_refresh_token..." source_type = "zoho-crm" start_datetime = "2000-01-01T13:00+00:00" } - name = "Alice Lind" + name = "Kenneth Fisher" secret_id = "...my_secret_id..." - workspace_id = "c759e02f-3702-4c5c-8e2d-30ead3104fa4" + workspace_id = "b1710688-deeb-4ef8-97f3-dd0ccd33f11b" } ``` diff --git a/docs/resources/source_zoom.md b/docs/resources/source_zoom.md index 8c5f7d02f..188b05b34 100644 --- a/docs/resources/source_zoom.md +++ b/docs/resources/source_zoom.md @@ -18,9 +18,9 @@ resource "airbyte_source_zoom" "my_source_zoom" { jwt_token = "...my_jwt_token..." source_type = "zoom" } - name = "Charlene Abbott" + name = "Alexis Gutmann IV" secret_id = "...my_secret_id..." 
- workspace_id = "f375b442-8282-41fd-b2f6-9e59267c71cc" + workspace_id = "0aa10418-6ec7-459e-82f3-702c5c8e2d30" } ``` diff --git a/docs/resources/source_zuora.md b/docs/resources/source_zuora.md index 20571983d..d1174e7d4 100644 --- a/docs/resources/source_zuora.md +++ b/docs/resources/source_zuora.md @@ -20,12 +20,12 @@ resource "airbyte_source_zuora" "my_source_zuora" { data_query = "Unlimited" source_type = "zuora" start_date = "...my_start_date..." - tenant_endpoint = "EU API Sandbox" - window_in_days = "1" + tenant_endpoint = "US Performance Test" + window_in_days = "200" } - name = "Carroll Grant" + name = "Joan Bednar" secret_id = "...my_secret_id..." - workspace_id = "8d0358a8-2c80-48fe-a751-a2047c0449e1" + workspace_id = "a44707bf-375b-4442-8282-1fdb2f69e592" } ``` diff --git a/docs/resources/workspace.md b/docs/resources/workspace.md index de75720ba..a3c6ca178 100644 --- a/docs/resources/workspace.md +++ b/docs/resources/workspace.md @@ -14,7 +14,7 @@ Workspace Resource ```terraform resource "airbyte_workspace" "my_workspace" { - name = "Crystal Weissnat" + name = "Glenda Schiller DDS" } ``` diff --git a/examples/data-sources/airbyte_destination_milvus/data-source.tf b/examples/data-sources/airbyte_destination_milvus/data-source.tf new file mode 100755 index 000000000..f44a53a1b --- /dev/null +++ b/examples/data-sources/airbyte_destination_milvus/data-source.tf @@ -0,0 +1,3 @@ +data "airbyte_destination_milvus" "my_destination_milvus" { + destination_id = "...my_destination_id..." +} \ No newline at end of file diff --git a/examples/data-sources/airbyte_destination_pinecone/data-source.tf b/examples/data-sources/airbyte_destination_pinecone/data-source.tf new file mode 100755 index 000000000..e5976353a --- /dev/null +++ b/examples/data-sources/airbyte_destination_pinecone/data-source.tf @@ -0,0 +1,3 @@ +data "airbyte_destination_pinecone" "my_destination_pinecone" { + destination_id = "...my_destination_id..." +} \ No newline at end of file diff --git a/examples/data-sources/airbyte_source_datadog/data-source.tf b/examples/data-sources/airbyte_source_datadog/data-source.tf deleted file mode 100755 index a2de20acd..000000000 --- a/examples/data-sources/airbyte_source_datadog/data-source.tf +++ /dev/null @@ -1,4 +0,0 @@ -data "airbyte_source_datadog" "my_source_datadog" { - secret_id = "...my_secret_id..." - source_id = "...my_source_id..." -} \ No newline at end of file diff --git a/examples/data-sources/airbyte_source_openweather/data-source.tf b/examples/data-sources/airbyte_source_openweather/data-source.tf deleted file mode 100755 index bd482766d..000000000 --- a/examples/data-sources/airbyte_source_openweather/data-source.tf +++ /dev/null @@ -1,4 +0,0 @@ -data "airbyte_source_openweather" "my_source_openweather" { - secret_id = "...my_secret_id..." - source_id = "...my_source_id..." -} \ No newline at end of file diff --git a/examples/data-sources/airbyte_source_public_apis/data-source.tf b/examples/data-sources/airbyte_source_public_apis/data-source.tf deleted file mode 100755 index cf38b4ec9..000000000 --- a/examples/data-sources/airbyte_source_public_apis/data-source.tf +++ /dev/null @@ -1,4 +0,0 @@ -data "airbyte_source_public_apis" "my_source_publicapis" { - secret_id = "...my_secret_id..." - source_id = "...my_source_id..." 
-} \ No newline at end of file diff --git a/examples/provider/provider.tf b/examples/provider/provider.tf index 6a0189284..612369c85 100755 --- a/examples/provider/provider.tf +++ b/examples/provider/provider.tf @@ -2,7 +2,7 @@ terraform { required_providers { airbyte = { source = "airbytehq/airbyte" - version = "0.3.3" + version = "0.3.4" } } } diff --git a/examples/resources/airbyte_destination_bigquery/resource.tf b/examples/resources/airbyte_destination_bigquery/resource.tf index ea2db9e72..52238cfc1 100755 --- a/examples/resources/airbyte_destination_bigquery/resource.tf +++ b/examples/resources/airbyte_destination_bigquery/resource.tf @@ -24,8 +24,7 @@ resource "airbyte_destination_bigquery" "my_destination_bigquery" { project_id = "...my_project_id..." raw_data_dataset = "...my_raw_data_dataset..." transformation_priority = "batch" - use_1s1t_format = false } - name = "Alison Mann" - workspace_id = "488e1e91-e450-4ad2-abd4-4269802d502a" + name = "Edna Pouros" + workspace_id = "d488e1e9-1e45-40ad-aabd-44269802d502" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf b/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf index 88590f130..510a75b11 100755 --- a/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf +++ b/examples/resources/airbyte_destination_bigquery_denormalized/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_destination_bigquery_denormalized" "my_destination_bigqueryden big_query_client_buffer_size_mb = 15 credentials_json = "...my_credentials_json..." dataset_id = "...my_dataset_id..." - dataset_location = "asia-southeast2" + dataset_location = "europe-west7" destination_type = "bigquery-denormalized" loading_method = { destination_bigquery_denormalized_loading_method_gcs_staging = { @@ -23,6 +23,6 @@ resource "airbyte_destination_bigquery_denormalized" "my_destination_bigqueryden } project_id = "...my_project_id..." 
} - name = "Lucia Kemmer" - workspace_id = "969e9a3e-fa77-4dfb-94cd-66ae395efb9b" + name = "Francisco Windler" + workspace_id = "c969e9a3-efa7-47df-b14c-d66ae395efb9" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_clickhouse/resource.tf b/examples/resources/airbyte_destination_clickhouse/resource.tf index bfea5e284..0f5974c9e 100755 --- a/examples/resources/airbyte_destination_clickhouse/resource.tf +++ b/examples/resources/airbyte_destination_clickhouse/resource.tf @@ -11,8 +11,8 @@ resource "airbyte_destination_clickhouse" "my_destination_clickhouse" { tunnel_method = "NO_TUNNEL" } } - username = "Jewell.Lesch64" + username = "Magdalena_Kuvalis" } - name = "Wilma Mosciski" - workspace_id = "074ba446-9b6e-4214-9959-890afa563e25" + name = "Sandy Huels" + workspace_id = "97074ba4-469b-46e2-9419-59890afa563e" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_convex/resource.tf b/examples/resources/airbyte_destination_convex/resource.tf index 1409f91da..be190f800 100755 --- a/examples/resources/airbyte_destination_convex/resource.tf +++ b/examples/resources/airbyte_destination_convex/resource.tf @@ -4,6 +4,6 @@ resource "airbyte_destination_convex" "my_destination_convex" { deployment_url = "https://murky-swan-635.convex.cloud" destination_type = "convex" } - name = "Melba Toy" - workspace_id = "8b711e5b-7fd2-4ed0-a892-1cddc692601f" + name = "Joyce Kertzmann" + workspace_id = "4c8b711e-5b7f-4d2e-9028-921cddc69260" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_cumulio/resource.tf b/examples/resources/airbyte_destination_cumulio/resource.tf index b468c6a67..328e8df4d 100755 --- a/examples/resources/airbyte_destination_cumulio/resource.tf +++ b/examples/resources/airbyte_destination_cumulio/resource.tf @@ -5,6 +5,6 @@ resource "airbyte_destination_cumulio" "my_destination_cumulio" { api_token = "...my_api_token..." destination_type = "cumulio" } - name = "Clyde Kling" - workspace_id = "0d5f0d30-c5fb-4b25-8705-3202c73d5fe9" + name = "Ebony Predovic" + workspace_id = "6b0d5f0d-30c5-4fbb-a587-053202c73d5f" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_databend/resource.tf b/examples/resources/airbyte_destination_databend/resource.tf index fb433b633..b42cdb76b 100755 --- a/examples/resources/airbyte_destination_databend/resource.tf +++ b/examples/resources/airbyte_destination_databend/resource.tf @@ -6,8 +6,8 @@ resource "airbyte_destination_databend" "my_destination_databend" { password = "...my_password..." 
port = 443 table = "default" - username = "Lauryn.Bartoletti50" + username = "Leo.Purdy" } - name = "Gary Mayert" - workspace_id = "fe49a8d9-cbf4-4863-b323-f9b77f3a4100" + name = "Bobby Kutch V" + workspace_id = "b3fe49a8-d9cb-4f48-a333-23f9b77f3a41" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_databricks/resource.tf b/examples/resources/airbyte_destination_databricks/resource.tf index 8fb8c1ae9..33aed18f4 100755 --- a/examples/resources/airbyte_destination_databricks/resource.tf +++ b/examples/resources/airbyte_destination_databricks/resource.tf @@ -13,9 +13,9 @@ resource "airbyte_destination_databricks" "my_destination_databricks" { databricks_server_hostname = "abc-12345678-wxyz.cloud.databricks.com" destination_type = "databricks" enable_schema_evolution = true - purge_staging_data = true + purge_staging_data = false schema = "default" } - name = "Rickey Wolf" - workspace_id = "280d1ba7-7a89-4ebf-b37a-e4203ce5e6a9" + name = "Bertha Thompson" + workspace_id = "69280d1b-a77a-489e-bf73-7ae4203ce5e6" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_dev_null/resource.tf b/examples/resources/airbyte_destination_dev_null/resource.tf index 5db2619c5..2f870d3a4 100755 --- a/examples/resources/airbyte_destination_dev_null/resource.tf +++ b/examples/resources/airbyte_destination_dev_null/resource.tf @@ -7,6 +7,6 @@ resource "airbyte_destination_dev_null" "my_destination_devnull" { } } } - name = "Kari Leannon PhD" - workspace_id = "446ce2af-7a73-4cf3-be45-3f870b326b5a" + name = "Rene Hane" + workspace_id = "a0d446ce-2af7-4a73-8f3b-e453f870b326" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_dynamodb/resource.tf b/examples/resources/airbyte_destination_dynamodb/resource.tf index 446972f7c..51f90ef97 100755 --- a/examples/resources/airbyte_destination_dynamodb/resource.tf +++ b/examples/resources/airbyte_destination_dynamodb/resource.tf @@ -3,10 +3,10 @@ resource "airbyte_destination_dynamodb" "my_destination_dynamodb" { access_key_id = "A012345678910EXAMPLE" destination_type = "dynamodb" dynamodb_endpoint = "http://localhost:9000" - dynamodb_region = "ap-southeast-2" + dynamodb_region = "eu-south-1" dynamodb_table_name_prefix = "airbyte_sync" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" } - name = "Amber Dibbert" - workspace_id = "db1a8422-bb67-49d2-b227-15bf0cbb1e31" + name = "Joanna Kohler" + workspace_id = "29cdb1a8-422b-4b67-9d23-22715bf0cbb1" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_elasticsearch/resource.tf b/examples/resources/airbyte_destination_elasticsearch/resource.tf index c8fe3791f..fb426e449 100755 --- a/examples/resources/airbyte_destination_elasticsearch/resource.tf +++ b/examples/resources/airbyte_destination_elasticsearch/resource.tf @@ -10,8 +10,8 @@ resource "airbyte_destination_elasticsearch" "my_destination_elasticsearch" { ca_certificate = "...my_ca_certificate..." destination_type = "elasticsearch" endpoint = "...my_endpoint..." - upsert = false + upsert = true } - name = "Dr. 
Randolph McDermott" - workspace_id = "443a1108-e0ad-4cf4-b921-879fce953f73" + name = "Carolyn Rohan" + workspace_id = "90f3443a-1108-4e0a-9cf4-b921879fce95" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_firebolt/resource.tf b/examples/resources/airbyte_destination_firebolt/resource.tf index c2885573f..9ef3dbd3c 100755 --- a/examples/resources/airbyte_destination_firebolt/resource.tf +++ b/examples/resources/airbyte_destination_firebolt/resource.tf @@ -17,6 +17,6 @@ resource "airbyte_destination_firebolt" "my_destination_firebolt" { password = "...my_password..." username = "username@email.com" } - name = "Jermaine Kuhic" - workspace_id = "d74dd39c-0f5d-42cf-b7c7-0a45626d4368" + name = "Roman Kulas" + workspace_id = "c7abd74d-d39c-40f5-92cf-f7c70a45626d" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_firestore/resource.tf b/examples/resources/airbyte_destination_firestore/resource.tf index 116723897..27edf3869 100755 --- a/examples/resources/airbyte_destination_firestore/resource.tf +++ b/examples/resources/airbyte_destination_firestore/resource.tf @@ -4,6 +4,6 @@ resource "airbyte_destination_firestore" "my_destination_firestore" { destination_type = "firestore" project_id = "...my_project_id..." } - name = "Ms. Cindy Wuckert" - workspace_id = "9f5fce6c-5561-446c-be25-0fb008c42e14" + name = "Paula Jacobs I" + workspace_id = "f16d9f5f-ce6c-4556-946c-3e250fb008c4" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_gcs/resource.tf b/examples/resources/airbyte_destination_gcs/resource.tf index aa882755e..0b8651aa1 100755 --- a/examples/resources/airbyte_destination_gcs/resource.tf +++ b/examples/resources/airbyte_destination_gcs/resource.tf @@ -20,8 +20,8 @@ resource "airbyte_destination_gcs" "my_destination_gcs" { } gcs_bucket_name = "airbyte_sync" gcs_bucket_path = "data_sync/test" - gcs_bucket_region = "us-east1" + gcs_bucket_region = "us-west1" } - name = "Neal Schroeder" - workspace_id = "6c8dd6b1-4429-4074-b477-8a7bd466d28c" + name = "Miss Dennis Friesen" + workspace_id = "c366c8dd-6b14-4429-8747-4778a7bd466d" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_google_sheets/resource.tf b/examples/resources/airbyte_destination_google_sheets/resource.tf index 499b9ea65..4a1cf9bba 100755 --- a/examples/resources/airbyte_destination_google_sheets/resource.tf +++ b/examples/resources/airbyte_destination_google_sheets/resource.tf @@ -8,6 +8,6 @@ resource "airbyte_destination_google_sheets" "my_destination_googlesheets" { destination_type = "google-sheets" spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG/edit" } - name = "Angela Olson" - workspace_id = "cdca4251-904e-4523-87e0-bc7178e4796f" + name = "Mr. 
Irma Schaefer" + workspace_id = "b3cdca42-5190-44e5-a3c7-e0bc7178e479" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_keen/resource.tf b/examples/resources/airbyte_destination_keen/resource.tf index 0d1cc9320..d7ed98c5e 100755 --- a/examples/resources/airbyte_destination_keen/resource.tf +++ b/examples/resources/airbyte_destination_keen/resource.tf @@ -2,9 +2,9 @@ resource "airbyte_destination_keen" "my_destination_keen" { configuration = { api_key = "ABCDEFGHIJKLMNOPRSTUWXYZ" destination_type = "keen" - infer_timestamp = true + infer_timestamp = false project_id = "58b4acc22ba938934e888322e" } - name = "Fernando Barton" - workspace_id = "88282aa4-8256-42f2-a2e9-817ee17cbe61" + name = "Todd Oberbrunner DDS" + workspace_id = "688282aa-4825-462f-a22e-9817ee17cbe6" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_kinesis/resource.tf b/examples/resources/airbyte_destination_kinesis/resource.tf index 6f7ce1460..c2e703465 100755 --- a/examples/resources/airbyte_destination_kinesis/resource.tf +++ b/examples/resources/airbyte_destination_kinesis/resource.tf @@ -1,13 +1,13 @@ resource "airbyte_destination_kinesis" "my_destination_kinesis" { configuration = { access_key = "...my_access_key..." - buffer_size = 9 + buffer_size = 1 destination_type = "kinesis" endpoint = "kinesis.us-west-1.amazonaws.com" private_key = "...my_private_key..." region = "us-west-1" - shard_count = 4 + shard_count = 9 } - name = "Javier Price" - workspace_id = "bc0ab3c2-0c4f-4378-9fd8-71f99dd2efd1" + name = "Opal Kozey" + workspace_id = "5bc0ab3c-20c4-4f37-89fd-871f99dd2efd" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_langchain/resource.tf b/examples/resources/airbyte_destination_langchain/resource.tf index 2303cabc1..2954d8c7f 100755 --- a/examples/resources/airbyte_destination_langchain/resource.tf +++ b/examples/resources/airbyte_destination_langchain/resource.tf @@ -14,13 +14,13 @@ resource "airbyte_destination_langchain" "my_destination_langchain" { } } processing = { - chunk_overlap = 1 + chunk_overlap = 0 chunk_size = 1 text_fields = [ "...", ] } } - name = "Shannon Jacobi DVM" - workspace_id = "674bdb04-f157-4560-82d6-8ea19f1d1705" + name = "Hattie Nader" + workspace_id = "1e674bdb-04f1-4575-a082-d68ea19f1d17" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_milvus/resource.tf b/examples/resources/airbyte_destination_milvus/resource.tf new file mode 100755 index 000000000..5530c8c8f --- /dev/null +++ b/examples/resources/airbyte_destination_milvus/resource.tf @@ -0,0 +1,36 @@ +resource "airbyte_destination_milvus" "my_destination_milvus" { + configuration = { + destination_type = "milvus" + embedding = { + destination_milvus_embedding_cohere = { + cohere_key = "...my_cohere_key..." + mode = "cohere" + } + } + indexing = { + auth = { + destination_milvus_indexing_authentication_api_token = { + mode = "token" + token = "...my_token..." + } + } + collection = "...my_collection..." + db = "...my_db..." + host = "https://my-instance.zone.zillizcloud.com" + text_field = "...my_text_field..." + vector_field = "...my_vector_field..." 
+ } + processing = { + chunk_overlap = 3 + chunk_size = 0 + metadata_fields = [ + "...", + ] + text_fields = [ + "...", + ] + } + } + name = "Sherry Morar IV" + workspace_id = "086a1840-394c-4260-b1f9-3f5f0642dac7" +} \ No newline at end of file diff --git a/examples/resources/airbyte_destination_mongodb/resource.tf b/examples/resources/airbyte_destination_mongodb/resource.tf index 5edfc26b8..6370f8749 100755 --- a/examples/resources/airbyte_destination_mongodb/resource.tf +++ b/examples/resources/airbyte_destination_mongodb/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_destination_mongodb" "my_destination_mongodb" { destination_mongodb_authorization_type_login_password = { authorization = "login/password" password = "...my_password..." - username = "Asha61" + username = "Lucienne.Yundt" } } database = "...my_database..." @@ -21,6 +21,6 @@ resource "airbyte_destination_mongodb" "my_destination_mongodb" { } } } - name = "Ms. Kenneth Ledner" - workspace_id = "a1840394-c260-471f-93f5-f0642dac7af5" + name = "Robyn Schmitt I" + workspace_id = "aa63aae8-d678-464d-bb67-5fd5e60b375e" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_mssql/resource.tf b/examples/resources/airbyte_destination_mssql/resource.tf index bc4b05d65..bfb5a45f1 100755 --- a/examples/resources/airbyte_destination_mssql/resource.tf +++ b/examples/resources/airbyte_destination_mssql/resource.tf @@ -17,8 +17,8 @@ resource "airbyte_destination_mssql" "my_destination_mssql" { tunnel_method = "NO_TUNNEL" } } - username = "Elwyn.Sawayn7" + username = "Desiree_Yost" } - name = "Kristine Ondricka" - workspace_id = "aae8d678-64db-4b67-9fd5-e60b375ed4f6" + name = "Bert Treutel DVM" + workspace_id = "33317fe3-5b60-4eb1-aa42-6555ba3c2874" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_mysql/resource.tf b/examples/resources/airbyte_destination_mysql/resource.tf index a949e555d..8827abfb9 100755 --- a/examples/resources/airbyte_destination_mysql/resource.tf +++ b/examples/resources/airbyte_destination_mysql/resource.tf @@ -11,8 +11,8 @@ resource "airbyte_destination_mysql" "my_destination_mysql" { tunnel_method = "NO_TUNNEL" } } - username = "Mavis.Ullrich12" + username = "Sheldon.Smitham" } - name = "Chad Franey IV" - workspace_id = "fe35b60e-b1ea-4426-955b-a3c28744ed53" + name = "Guy Luettgen" + workspace_id = "a8d8f5c0-b2f2-4fb7-b194-a276b26916fe" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_oracle/resource.tf b/examples/resources/airbyte_destination_oracle/resource.tf index 09b470eab..b0db9a2ae 100755 --- a/examples/resources/airbyte_destination_oracle/resource.tf +++ b/examples/resources/airbyte_destination_oracle/resource.tf @@ -12,8 +12,8 @@ resource "airbyte_destination_oracle" "my_destination_oracle" { tunnel_method = "NO_TUNNEL" } } - username = "Jimmy.Luettgen63" + username = "Viviane_Aufderhar" } - name = "Dewey Leannon" - workspace_id = "c0b2f2fb-7b19-44a2-b6b2-6916fe1f08f4" + name = "Tammy Medhurst" + workspace_id = "3698f447-f603-4e8b-845e-80ca55efd20e" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_pinecone/resource.tf b/examples/resources/airbyte_destination_pinecone/resource.tf new file mode 100755 index 000000000..0e6dcbdd4 --- /dev/null +++ b/examples/resources/airbyte_destination_pinecone/resource.tf @@ -0,0 +1,28 @@ +resource "airbyte_destination_pinecone" "my_destination_pinecone" { + configuration = { + destination_type = "pinecone" + embedding = { + 
destination_pinecone_embedding_cohere = { + cohere_key = "...my_cohere_key..." + mode = "cohere" + } + } + indexing = { + index = "...my_index..." + pinecone_environment = "...my_pinecone_environment..." + pinecone_key = "...my_pinecone_key..." + } + processing = { + chunk_overlap = 2 + chunk_size = 3 + metadata_fields = [ + "...", + ] + text_fields = [ + "...", + ] + } + } + name = "Cecelia Braun" + workspace_id = "8b6a89fb-e3a5-4aa8-a482-4d0ab4075088" +} \ No newline at end of file diff --git a/examples/resources/airbyte_destination_postgres/resource.tf b/examples/resources/airbyte_destination_postgres/resource.tf index df4663b51..21400b6a8 100755 --- a/examples/resources/airbyte_destination_postgres/resource.tf +++ b/examples/resources/airbyte_destination_postgres/resource.tf @@ -17,8 +17,8 @@ resource "airbyte_destination_postgres" "my_destination_postgres" { tunnel_method = "NO_TUNNEL" } } - username = "Laron.Gerlach40" + username = "Foster.Borer" } - name = "Isaac Wolf" - workspace_id = "7f603e8b-445e-480c-a55e-fd20e457e185" + name = "Karen Kautzer" + workspace_id = "904f3b11-94b8-4abf-a03a-79f9dfe0ab7d" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_pubsub/resource.tf b/examples/resources/airbyte_destination_pubsub/resource.tf index 5bc3e621d..0aa235ea2 100755 --- a/examples/resources/airbyte_destination_pubsub/resource.tf +++ b/examples/resources/airbyte_destination_pubsub/resource.tf @@ -1,15 +1,15 @@ resource "airbyte_destination_pubsub" "my_destination_pubsub" { configuration = { - batching_delay_threshold = 5 - batching_element_count_threshold = 8 - batching_enabled = false - batching_request_bytes_threshold = 7 + batching_delay_threshold = 7 + batching_element_count_threshold = 5 + batching_enabled = true + batching_request_bytes_threshold = 3 credentials_json = "...my_credentials_json..." destination_type = "pubsub" - ordering_enabled = false + ordering_enabled = true project_id = "...my_project_id..." topic_id = "...my_topic_id..." } - name = "Irvin Rath" - workspace_id = "a5aa8e48-24d0-4ab4-8750-88e51862065e" + name = "Phil Boyer" + workspace_id = "f86bc173-d689-4eee-9526-f8d986e881ea" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_redis/resource.tf b/examples/resources/airbyte_destination_redis/resource.tf index 29215c10c..f4b410ea8 100755 --- a/examples/resources/airbyte_destination_redis/resource.tf +++ b/examples/resources/airbyte_destination_redis/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_destination_redis" "my_destination_redis" { destination_type = "redis" host = "localhost,127.0.0.1" password = "...my_password..." 
- port = 6 + port = 9 ssl = false ssl_mode = { destination_redis_ssl_modes_disable = { @@ -16,8 +16,8 @@ resource "airbyte_destination_redis" "my_destination_redis" { tunnel_method = "NO_TUNNEL" } } - username = "Duncan69" + username = "Vivianne.Baumbach3" } - name = "Diane Mayer" - workspace_id = "8abf603a-79f9-4dfe-8ab7-da8a50ce187f" + name = "Bonnie Halvorson" + workspace_id = "f94e29e9-73e9-422a-97a1-5be3e060807e" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_redshift/resource.tf b/examples/resources/airbyte_destination_redshift/resource.tf index 4b5da214c..069b130b6 100755 --- a/examples/resources/airbyte_destination_redshift/resource.tf +++ b/examples/resources/airbyte_destination_redshift/resource.tf @@ -22,17 +22,17 @@ resource "airbyte_destination_redshift" "my_destination_redshift" { } } file_buffer_count = 10 - file_name_pattern = "{part_number}" + file_name_pattern = "{timestamp}" method = "S3 Staging" - purge_staging_data = true + purge_staging_data = false s3_bucket_name = "airbyte.staging" s3_bucket_path = "data_sync/test" - s3_bucket_region = "us-east-2" + s3_bucket_region = "us-west-2" secret_access_key = "...my_secret_access_key..." } } - username = "Jairo.Farrell53" + username = "Margarette_Rau" } - name = "Frankie Torphy" - workspace_id = "526f8d98-6e88-41ea-94f0-e1012563f94e" + name = "Mrs. Geraldine Zulauf" + workspace_id = "7a60ff2a-54a3-41e9-8764-a3e865e7956f" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_s3/resource.tf b/examples/resources/airbyte_destination_s3/resource.tf index 18ad308cd..4f2f1b546 100755 --- a/examples/resources/airbyte_destination_s3/resource.tf +++ b/examples/resources/airbyte_destination_s3/resource.tf @@ -2,7 +2,7 @@ resource "airbyte_destination_s3" "my_destination_s3" { configuration = { access_key_id = "A012345678910EXAMPLE" destination_type = "s3" - file_name_pattern = "{date}" + file_name_pattern = "{timestamp}" format = { destination_s3_output_format_avro_apache_avro = { compression_codec = { @@ -15,11 +15,11 @@ resource "airbyte_destination_s3" "my_destination_s3" { } s3_bucket_name = "airbyte_sync" s3_bucket_path = "data_sync/test" - s3_bucket_region = "cn-northwest-1" + s3_bucket_region = "us-west-1" s3_endpoint = "http://localhost:9000" s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" } - name = "Freddie King" - workspace_id = "922a57a1-5be3-4e06-8807-e2b6e3ab8845" + name = "Joyce O'Kon" + workspace_id = "9da660ff-57bf-4aad-8f9e-fc1b4512c103" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_s3_glue/resource.tf b/examples/resources/airbyte_destination_s3_glue/resource.tf index 02f5bd799..50a910473 100755 --- a/examples/resources/airbyte_destination_s3_glue/resource.tf +++ b/examples/resources/airbyte_destination_s3_glue/resource.tf @@ -2,7 +2,7 @@ resource "airbyte_destination_s3_glue" "my_destination_s3glue" { configuration = { access_key_id = "A012345678910EXAMPLE" destination_type = "s3-glue" - file_name_pattern = "{sync_id}" + file_name_pattern = "{date}" format = { destination_s3_glue_output_format_json_lines_newline_delimited_json = { compression = { @@ -18,11 +18,11 @@ resource "airbyte_destination_s3_glue" "my_destination_s3glue" { glue_serialization_library = "org.openx.data.jsonserde.JsonSerDe" s3_bucket_name = "airbyte_sync" s3_bucket_path = "data_sync/test" - s3_bucket_region = "cn-northwest-1" + s3_bucket_region = 
"ca-central-1" s3_endpoint = "http://localhost:9000" s3_path_format = "${NAMESPACE}/${STREAM_NAME}/${YEAR}_${MONTH}_${DAY}_${EPOCH}_" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" } - name = "Dr. Iris Hodkiewicz" - workspace_id = "2a54a31e-9476-44a3-a865-e7956f9251a5" + name = "Edmund Daugherty" + workspace_id = "15199ebf-d0e9-4fe6-8632-ca3aed011799" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_sftp_json/resource.tf b/examples/resources/airbyte_destination_sftp_json/resource.tf index e04263e02..81c008f85 100755 --- a/examples/resources/airbyte_destination_sftp_json/resource.tf +++ b/examples/resources/airbyte_destination_sftp_json/resource.tf @@ -5,8 +5,8 @@ resource "airbyte_destination_sftp_json" "my_destination_sftpjson" { host = "...my_host..." password = "...my_password..." port = 22 - username = "Lane.Schuster" + username = "Dayton98" } - name = "Ruth Zulauf" - workspace_id = "7bfaad4f-9efc-41b4-912c-1032648dc2f6" + name = "Terence Beer" + workspace_id = "71778ff6-1d01-4747-a360-a15db6a66065" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_snowflake/resource.tf b/examples/resources/airbyte_destination_snowflake/resource.tf index 179f461bb..53828d8f3 100755 --- a/examples/resources/airbyte_destination_snowflake/resource.tf +++ b/examples/resources/airbyte_destination_snowflake/resource.tf @@ -9,15 +9,14 @@ resource "airbyte_destination_snowflake" "my_destination_snowflake" { } database = "AIRBYTE_DATABASE" destination_type = "snowflake" - host = "accountname.us-east-2.aws.snowflakecomputing.com" + host = "accountname.snowflakecomputing.com" jdbc_url_params = "...my_jdbc_url_params..." raw_data_schema = "...my_raw_data_schema..." role = "AIRBYTE_ROLE" schema = "AIRBYTE_SCHEMA" - use_1s1t_format = true username = "AIRBYTE_USER" warehouse = "AIRBYTE_WAREHOUSE" } - name = "Dr. Terrell Stanton" - workspace_id = "fe6c632c-a3ae-4d01-9799-6312fde04771" + name = "Shaun Osinski" + workspace_id = "851d6c64-5b08-4b61-891b-aa0fe1ade008" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_timeplus/resource.tf b/examples/resources/airbyte_destination_timeplus/resource.tf index 580726907..993ad14c9 100755 --- a/examples/resources/airbyte_destination_timeplus/resource.tf +++ b/examples/resources/airbyte_destination_timeplus/resource.tf @@ -4,6 +4,6 @@ resource "airbyte_destination_timeplus" "my_destination_timeplus" { destination_type = "timeplus" endpoint = "https://us.timeplus.cloud/workspace_id" } - name = "Tamara Lang" - workspace_id = "61d01747-6360-4a15-9b6a-660659a1adea" + name = "Ruben Williamson" + workspace_id = "5f350d8c-db5a-4341-8143-010421813d52" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_typesense/resource.tf b/examples/resources/airbyte_destination_typesense/resource.tf index 509ad4a64..8ac02039f 100755 --- a/examples/resources/airbyte_destination_typesense/resource.tf +++ b/examples/resources/airbyte_destination_typesense/resource.tf @@ -1,12 +1,12 @@ resource "airbyte_destination_typesense" "my_destination_typesense" { configuration = { api_key = "...my_api_key..." - batch_size = "...my_batch_size..." + batch_size = 0 destination_type = "typesense" host = "...my_host..." port = "...my_port..." protocol = "...my_protocol..." 
} - name = "Wm Hane" - workspace_id = "1d6c645b-08b6-4189-9baa-0fe1ade008e6" + name = "Conrad Rutherford" + workspace_id = "e253b668-451c-46c6-a205-e16deab3fec9" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_vertica/resource.tf b/examples/resources/airbyte_destination_vertica/resource.tf index 1cfe8770c..17bdfbd5c 100755 --- a/examples/resources/airbyte_destination_vertica/resource.tf +++ b/examples/resources/airbyte_destination_vertica/resource.tf @@ -12,8 +12,8 @@ resource "airbyte_destination_vertica" "my_destination_vertica" { tunnel_method = "NO_TUNNEL" } } - username = "Judge_Schinner" + username = "Jackson.Kuvalis" } - name = "Marion Aufderhar" - workspace_id = "cdb5a341-8143-4010-8218-13d5208ece7e" + name = "Ida Lubowitz" + workspace_id = "73a8418d-1623-409f-b092-9921aefb9f58" } \ No newline at end of file diff --git a/examples/resources/airbyte_destination_xata/resource.tf b/examples/resources/airbyte_destination_xata/resource.tf index 11a440b34..082cbbe68 100755 --- a/examples/resources/airbyte_destination_xata/resource.tf +++ b/examples/resources/airbyte_destination_xata/resource.tf @@ -4,6 +4,6 @@ resource "airbyte_destination_xata" "my_destination_xata" { db_url = "https://my-workspace-abc123.us-east-1.xata.sh/db/nyc-taxi-fares:main" destination_type = "xata" } - name = "Sally Dooley" - workspace_id = "68451c6c-6e20-45e1-adea-b3fec9578a64" + name = "Oscar Smith" + workspace_id = "e68e4be0-5601-43f5-9da7-57a59ecfef66" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_aha/resource.tf b/examples/resources/airbyte_source_aha/resource.tf index 18635d439..f1b3af875 100755 --- a/examples/resources/airbyte_source_aha/resource.tf +++ b/examples/resources/airbyte_source_aha/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_aha" "my_source_aha" { source_type = "aha" url = "...my_url..." } - name = "Brandy Gibson" + name = "Van Bergnaum" secret_id = "...my_secret_id..." - workspace_id = "3a8418d1-6230-49fb-8929-921aefb9f58c" + workspace_id = "a3383c2b-eb47-4737-bc8d-72f64d1db1f2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_aircall/resource.tf b/examples/resources/airbyte_source_aircall/resource.tf index 47f540ab7..190aa852c 100755 --- a/examples/resources/airbyte_source_aircall/resource.tf +++ b/examples/resources/airbyte_source_aircall/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_aircall" "my_source_aircall" { source_type = "aircall" start_date = "2022-03-01T00:00:00.000Z" } - name = "Leslie Waters" + name = "Martha Bashirian" secret_id = "...my_secret_id..." - workspace_id = "e4be0560-13f5-49da-b57a-59ecfef66ef1" + workspace_id = "1e96349e-1cf9-4e06-a3a4-37000ae6b6bc" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_airtable/resource.tf b/examples/resources/airbyte_source_airtable/resource.tf index cc1f28ed0..07c584229 100755 --- a/examples/resources/airbyte_source_airtable/resource.tf +++ b/examples/resources/airbyte_source_airtable/resource.tf @@ -7,12 +7,12 @@ resource "airbyte_source_airtable" "my_source_airtable" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2020-12-11T09:39:15.481Z" + token_expiry_date = "2021-08-01T09:41:55.270Z" } } source_type = "airtable" } - name = "Vincent Frami" + name = "Tommie Klocko" secret_id = "...my_secret_id..." 
- workspace_id = "c2beb477-373c-48d7-af64-d1db1f2c4310" + workspace_id = "eac55a97-41d3-4113-9296-5bb8a7202611" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_alloydb/resource.tf b/examples/resources/airbyte_source_alloydb/resource.tf index 15dc5fe7d..813b5dd16 100755 --- a/examples/resources/airbyte_source_alloydb/resource.tf +++ b/examples/resources/airbyte_source_alloydb/resource.tf @@ -7,12 +7,12 @@ resource "airbyte_source_alloydb" "my_source_alloydb" { port = 5432 replication_method = { source_alloydb_replication_method_logical_replication_cdc_ = { - initial_waiting_seconds = 4 + initial_waiting_seconds = 2 lsn_commit_behaviour = "While reading Data" method = "CDC" plugin = "pgoutput" publication = "...my_publication..." - queue_size = 9 + queue_size = 10 replication_slot = "...my_replication_slot..." } } @@ -30,9 +30,9 @@ resource "airbyte_source_alloydb" "my_source_alloydb" { tunnel_method = "NO_TUNNEL" } } - username = "Keon28" + username = "Ashlynn_Emard" } - name = "Merle Carroll" + name = "Wilbert Crona" secret_id = "...my_secret_id..." - workspace_id = "9e06e3a4-3700-40ae-ab6b-c9b8f759eac5" + workspace_id = "9b1abda8-c070-4e10-84cb-0672d1ad879e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_amazon_ads/resource.tf b/examples/resources/airbyte_source_amazon_ads/resource.tf index 65205a7b5..6a42f262d 100755 --- a/examples/resources/airbyte_source_amazon_ads/resource.tf +++ b/examples/resources/airbyte_source_amazon_ads/resource.tf @@ -3,14 +3,17 @@ resource "airbyte_source_amazon_ads" "my_source_amazonads" { auth_type = "oauth2.0" client_id = "...my_client_id..." client_secret = "...my_client_secret..." - look_back_window = 3 + look_back_window = 10 + marketplace_ids = [ + "...", + ] profiles = [ 6, ] refresh_token = "...my_refresh_token..." region = "EU" report_record_types = [ - "asins_keywords", + "asins_targets", ] source_type = "amazon-ads" start_date = "2022-10-10" @@ -18,7 +21,7 @@ resource "airbyte_source_amazon_ads" "my_source_amazonads" { "archived", ] } - name = "Evelyn Bode" + name = "Dan Towne" secret_id = "...my_secret_id..." - workspace_id = "2965bb8a-7202-4611-835e-139dbc2259b1" + workspace_id = "d02bae0b-e2d7-4822-99e3-ea4b5197f924" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_amazon_seller_partner/resource.tf b/examples/resources/airbyte_source_amazon_seller_partner/resource.tf index fcf5005b3..189c56620 100755 --- a/examples/resources/airbyte_source_amazon_seller_partner/resource.tf +++ b/examples/resources/airbyte_source_amazon_seller_partner/resource.tf @@ -1,23 +1,23 @@ resource "airbyte_source_amazon_seller_partner" "my_source_amazonsellerpartner" { configuration = { - advanced_stream_options = "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}" + advanced_stream_options = "{\"GET_SALES_AND_TRAFFIC_REPORT\": {\"availability_sla_days\": 3}}" auth_type = "oauth2.0" aws_access_key = "...my_aws_access_key..." - aws_environment = "SANDBOX" + aws_environment = "PRODUCTION" aws_secret_key = "...my_aws_secret_key..." lwa_app_id = "...my_lwa_app_id..." lwa_client_secret = "...my_lwa_client_secret..." max_wait_seconds = 1980 - period_in_days = 6 + period_in_days = 5 refresh_token = "...my_refresh_token..." 
- region = "SE" + region = "SA" replication_end_date = "2017-01-25T00:00:00Z" replication_start_date = "2017-01-25T00:00:00Z" - report_options = "{\"GET_BRAND_ANALYTICS_SEARCH_TERMS_REPORT\": {\"reportPeriod\": \"WEEK\"}}" + report_options = "{\"GET_SOME_REPORT\": {\"custom\": \"true\"}}" role_arn = "...my_role_arn..." source_type = "amazon-seller-partner" } - name = "Mr. Angela Volkman" + name = "Phyllis Quitzon" secret_id = "...my_secret_id..." - workspace_id = "4cb0672d-1ad8-479e-ab96-65b85efbd02b" + workspace_id = "5c537c64-54ef-4b0b-b489-6c3ca5acfbe2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_amazon_sqs/resource.tf b/examples/resources/airbyte_source_amazon_sqs/resource.tf index 1e3580a78..9721a2135 100755 --- a/examples/resources/airbyte_source_amazon_sqs/resource.tf +++ b/examples/resources/airbyte_source_amazon_sqs/resource.tf @@ -2,16 +2,16 @@ resource "airbyte_source_amazon_sqs" "my_source_amazonsqs" { configuration = { access_key = "xxxxxHRNxxx3TBxxxxxx" attributes_to_return = "attr1,attr2" - delete_messages = true + delete_messages = false max_batch_size = 5 max_wait_time = 5 queue_url = "https://sqs.eu-west-1.amazonaws.com/1234567890/my-example-queue" - region = "eu-south-1" + region = "ap-southeast-2" secret_key = "hu+qE5exxxxT6o/ZrKsxxxxxxBhxxXLexxxxxVKz" source_type = "amazon-sqs" visibility_timeout = 15 } - name = "Rosemarie Kub" + name = "Cathy Kirlin" secret_id = "...my_secret_id..." - workspace_id = "259e3ea4-b519-47f9-a443-da7ce52b895c" + workspace_id = "29177dea-c646-4ecb-9734-09e3eb1e5a2b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_amplitude/resource.tf b/examples/resources/airbyte_source_amplitude/resource.tf index 71dcd7a42..4266cd121 100755 --- a/examples/resources/airbyte_source_amplitude/resource.tf +++ b/examples/resources/airbyte_source_amplitude/resource.tf @@ -2,12 +2,12 @@ resource "airbyte_source_amplitude" "my_source_amplitude" { configuration = { api_key = "...my_api_key..." data_region = "Standard Server" - request_time_range = 2 + request_time_range = 1 secret_key = "...my_secret_key..." source_type = "amplitude" start_date = "2021-01-25T00:00:00Z" } - name = "Sadie Kemmer" + name = "Robin Bednar" secret_id = "...my_secret_id..." - workspace_id = "4efb0b34-896c-43ca-9acf-be2fd5707577" + workspace_id = "116db995-45fc-495f-a889-70e189dbb30f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_apify_dataset/resource.tf b/examples/resources/airbyte_source_apify_dataset/resource.tf index c294903ef..282254900 100755 --- a/examples/resources/airbyte_source_apify_dataset/resource.tf +++ b/examples/resources/airbyte_source_apify_dataset/resource.tf @@ -3,8 +3,9 @@ resource "airbyte_source_apify_dataset" "my_source_apifydataset" { clean = true dataset_id = "...my_dataset_id..." source_type = "apify-dataset" + token = "Personal API tokens" } - name = "Natasha Bogan" + name = "Dale Ferry" secret_id = "...my_secret_id..." - workspace_id = "deac646e-cb57-4340-9e3e-b1e5a2b12eb0" + workspace_id = "055b197c-d44e-42f5-ad82-d3513bb6f48b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_appfollow/resource.tf b/examples/resources/airbyte_source_appfollow/resource.tf index 032abc0f4..1870408d1 100755 --- a/examples/resources/airbyte_source_appfollow/resource.tf +++ b/examples/resources/airbyte_source_appfollow/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_appfollow" "my_source_appfollow" { api_secret = "...my_api_secret..." 
source_type = "appfollow" } - name = "Mrs. Elisa Bogisich" + name = "Regina Huel" secret_id = "...my_secret_id..." - workspace_id = "b99545fc-95fa-4889-b0e1-89dbb30fcb33" + workspace_id = "db35ff2e-4b27-4537-a8cd-9e7319c177d5" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_asana/resource.tf b/examples/resources/airbyte_source_asana/resource.tf index 460be821f..24081f27a 100755 --- a/examples/resources/airbyte_source_asana/resource.tf +++ b/examples/resources/airbyte_source_asana/resource.tf @@ -10,7 +10,7 @@ resource "airbyte_source_asana" "my_source_asana" { } source_type = "asana" } - name = "Doug Baumbach" + name = "Jill Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "b197cd44-e2f5-42d8-ad35-13bb6f48b656" + workspace_id = "b114eeb5-2ff7-485f-8378-14d4c98e0c2b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_auth0/resource.tf b/examples/resources/airbyte_source_auth0/resource.tf index 6b73a6fde..91e63dcbc 100755 --- a/examples/resources/airbyte_source_auth0/resource.tf +++ b/examples/resources/airbyte_source_auth0/resource.tf @@ -8,8 +8,9 @@ resource "airbyte_source_auth0" "my_source_auth0" { } } source_type = "auth0" + start_date = "2023-08-05T00:43:59.244Z" } - name = "Roosevelt Schultz" + name = "Willard McLaughlin" secret_id = "...my_secret_id..." - workspace_id = "5ff2e4b2-7537-4a8c-99e7-319c177d525f" + workspace_id = "75dad636-c600-4503-98bb-31180f739ae9" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_aws_cloudtrail/resource.tf b/examples/resources/airbyte_source_aws_cloudtrail/resource.tf index 727d43924..524f4ccc9 100755 --- a/examples/resources/airbyte_source_aws_cloudtrail/resource.tf +++ b/examples/resources/airbyte_source_aws_cloudtrail/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_aws_cloudtrail" "my_source_awscloudtrail" { source_type = "aws-cloudtrail" start_date = "2021-01-01" } - name = "Gregory Bernhard" + name = "Nellie Waters" secret_id = "...my_secret_id..." - workspace_id = "eb52ff78-5fc3-4781-8d4c-98e0c2bb89eb" + workspace_id = "09e28103-31f3-4981-94c7-00b607f3c93c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_azure_blob_storage/resource.tf b/examples/resources/airbyte_source_azure_blob_storage/resource.tf index 619d54170..cbf51bfc3 100755 --- a/examples/resources/airbyte_source_azure_blob_storage/resource.tf +++ b/examples/resources/airbyte_source_azure_blob_storage/resource.tf @@ -13,7 +13,7 @@ resource "airbyte_source_azure_blob_storage" "my_source_azureblobstorage" { } source_type = "azure-blob-storage" } - name = "Cristina Murphy" + name = "Patty Mraz" secret_id = "...my_secret_id..." - workspace_id = "36c60050-3d8b-4b31-980f-739ae9e057eb" + workspace_id = "3f2ceda7-e23f-4225-b411-faf4b7544e47" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_azure_table/resource.tf b/examples/resources/airbyte_source_azure_table/resource.tf index 21c0d917a..f8b67a2aa 100755 --- a/examples/resources/airbyte_source_azure_table/resource.tf +++ b/examples/resources/airbyte_source_azure_table/resource.tf @@ -3,9 +3,9 @@ resource "airbyte_source_azure_table" "my_source_azuretable" { source_type = "azure-table" storage_access_key = "...my_storage_access_key..." storage_account_name = "...my_storage_account_name..." - storage_endpoint_suffix = "core.chinacloudapi.cn" + storage_endpoint_suffix = "core.windows.net" } - name = "Bobbie Thompson" + name = "Ian Baumbach" secret_id = "...my_secret_id..." 
- workspace_id = "10331f39-81d4-4c70-8b60-7f3c93c73b9d" + workspace_id = "57a5b404-63a7-4d57-9f14-00e764ad7334" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_bamboo_hr/resource.tf b/examples/resources/airbyte_source_bamboo_hr/resource.tf index d021452f9..034bf7c8c 100755 --- a/examples/resources/airbyte_source_bamboo_hr/resource.tf +++ b/examples/resources/airbyte_source_bamboo_hr/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_bamboo_hr" "my_source_bamboohr" { source_type = "bamboo-hr" subdomain = "...my_subdomain..." } - name = "Mandy Collier" + name = "Ralph Rau" secret_id = "...my_secret_id..." - workspace_id = "da7e23f2-2574-411f-af4b-7544e472e802" + workspace_id = "1b36a080-88d1-400e-bada-200ef0422eb2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_bigcommerce/resource.tf b/examples/resources/airbyte_source_bigcommerce/resource.tf index 9d618b62e..8235355de 100755 --- a/examples/resources/airbyte_source_bigcommerce/resource.tf +++ b/examples/resources/airbyte_source_bigcommerce/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_bigcommerce" "my_source_bigcommerce" { start_date = "2021-01-01" store_hash = "...my_store_hash..." } - name = "Bill Kling" + name = "Beth Gleason" secret_id = "...my_secret_id..." - workspace_id = "b40463a7-d575-4f14-80e7-64ad7334ec1b" + workspace_id = "9ab8366c-723f-4fda-9e06-bee4825c1fc0" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_bigquery/resource.tf b/examples/resources/airbyte_source_bigquery/resource.tf index c2a6dce1b..c9122f490 100755 --- a/examples/resources/airbyte_source_bigquery/resource.tf +++ b/examples/resources/airbyte_source_bigquery/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_bigquery" "my_source_bigquery" { project_id = "...my_project_id..." source_type = "bigquery" } - name = "Tracey Bosco" + name = "Joe Bradtke" secret_id = "...my_secret_id..." - workspace_id = "6a08088d-100e-4fad-a200-ef0422eb2164" + workspace_id = "80bff918-544e-4c42-9efc-ce8f1977773e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_bing_ads/resource.tf b/examples/resources/airbyte_source_bing_ads/resource.tf index f8255b5ba..7633b99ea 100755 --- a/examples/resources/airbyte_source_bing_ads/resource.tf +++ b/examples/resources/airbyte_source_bing_ads/resource.tf @@ -4,13 +4,13 @@ resource "airbyte_source_bing_ads" "my_source_bingads" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." developer_token = "...my_developer_token..." - lookback_window = 8 + lookback_window = 4 refresh_token = "...my_refresh_token..." - reports_start_date = "2021-04-24" + reports_start_date = "2022-08-23" source_type = "bing-ads" tenant_id = "...my_tenant_id..." } - name = "Randolph Lebsack" + name = "Kathryn Nitzsche" secret_id = "...my_secret_id..." - workspace_id = "6c723ffd-a9e0-46be-a482-5c1fc0e115c8" + workspace_id = "408f05e3-d48f-4daf-b13a-1f5fd94259c0" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_braintree/resource.tf b/examples/resources/airbyte_source_braintree/resource.tf index 4c5035c24..e67333ee3 100755 --- a/examples/resources/airbyte_source_braintree/resource.tf +++ b/examples/resources/airbyte_source_braintree/resource.tf @@ -1,13 +1,13 @@ resource "airbyte_source_braintree" "my_source_braintree" { configuration = { - environment = "Qa" + environment = "Development" merchant_id = "...my_merchant_id..." private_key = "...my_private_key..." public_key = "...my_public_key..." 
source_type = "braintree" - start_date = "2020-11-22 20:20:05" + start_date = "2020-12-30" } - name = "Olga Hermiston" + name = "Henrietta Nienow" secret_id = "...my_secret_id..." - workspace_id = "ec42defc-ce8f-4197-b773-e63562a7b408" + workspace_id = "4f3b756c-11f6-4c37-a512-6243835bbc05" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_braze/resource.tf b/examples/resources/airbyte_source_braze/resource.tf index 154da8535..ffaee2c2e 100755 --- a/examples/resources/airbyte_source_braze/resource.tf +++ b/examples/resources/airbyte_source_braze/resource.tf @@ -2,10 +2,10 @@ resource "airbyte_source_braze" "my_source_braze" { configuration = { api_key = "...my_api_key..." source_type = "braze" - start_date = "2022-11-23" + start_date = "2022-09-06" url = "...my_url..." } - name = "Tricia DuBuque" + name = "Rosie Glover" secret_id = "...my_secret_id..." - workspace_id = "8fdaf313-a1f5-4fd9-8259-c0b36f25ea94" + workspace_id = "efc5fde1-0a0c-4e21-a9e5-10019c6dc5e3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_chargebee/resource.tf b/examples/resources/airbyte_source_chargebee/resource.tf index 341d41a07..20c68d417 100755 --- a/examples/resources/airbyte_source_chargebee/resource.tf +++ b/examples/resources/airbyte_source_chargebee/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_chargebee" "my_source_chargebee" { source_type = "chargebee" start_date = "2021-01-25T00:00:00Z" } - name = "Harvey Harber" + name = "Viola Morissette" secret_id = "...my_secret_id..." - workspace_id = "11f6c37a-5126-4243-835b-bc05a23a45ce" + workspace_id = "fbbe6949-fb2b-4b4e-8ae6-c3d5db3adebd" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_chartmogul/resource.tf b/examples/resources/airbyte_source_chartmogul/resource.tf index 4c2e02d49..26786c0bb 100755 --- a/examples/resources/airbyte_source_chartmogul/resource.tf +++ b/examples/resources/airbyte_source_chartmogul/resource.tf @@ -1,11 +1,11 @@ resource "airbyte_source_chartmogul" "my_source_chartmogul" { configuration = { api_key = "...my_api_key..." - interval = "quarter" + interval = "week" source_type = "chartmogul" start_date = "2017-01-25T00:00:00Z" } - name = "Mr. Kristopher Torphy" + name = "Neal Gorczany" secret_id = "...my_secret_id..." - workspace_id = "0ce2169e-5100-419c-adc5-e34762799bfb" + workspace_id = "06a8aa94-c026-444c-b5e9-d9a4578adc1a" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_clickhouse/resource.tf b/examples/resources/airbyte_source_clickhouse/resource.tf index 0f1c02fd8..60847903f 100755 --- a/examples/resources/airbyte_source_clickhouse/resource.tf +++ b/examples/resources/airbyte_source_clickhouse/resource.tf @@ -10,9 +10,9 @@ resource "airbyte_source_clickhouse" "my_source_clickhouse" { tunnel_method = "NO_TUNNEL" } } - username = "Toni.Hudson" + username = "Gerry81" } - name = "Irvin Rippin" + name = "Mr. Simon Altenwerth" secret_id = "...my_secret_id..." - workspace_id = "b4ecae6c-3d5d-4b3a-9ebd-5daea4c506a8" + workspace_id = "c802e2ec-09ff-48f0-b816-ff3477c13e90" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_clickup_api/resource.tf b/examples/resources/airbyte_source_clickup_api/resource.tf index a7de2215d..f07e8ffae 100755 --- a/examples/resources/airbyte_source_clickup_api/resource.tf +++ b/examples/resources/airbyte_source_clickup_api/resource.tf @@ -2,13 +2,13 @@ resource "airbyte_source_clickup_api" "my_source_clickupapi" { configuration = { api_token = "...my_api_token..." 
folder_id = "...my_folder_id..." - include_closed_tasks = false + include_closed_tasks = true list_id = "...my_list_id..." source_type = "clickup-api" space_id = "...my_space_id..." team_id = "...my_team_id..." } - name = "Sergio Grant Sr." + name = "Mr. Jack Gottlieb" secret_id = "...my_secret_id..." - workspace_id = "644cf5e9-d9a4-4578-adc1-ac600dec001a" + workspace_id = "b0960a66-8151-4a47-aaf9-23c5949f83f3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_clockify/resource.tf b/examples/resources/airbyte_source_clockify/resource.tf index 2e2a0ea93..6e40ed5d1 100755 --- a/examples/resources/airbyte_source_clockify/resource.tf +++ b/examples/resources/airbyte_source_clockify/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_clockify" "my_source_clockify" { source_type = "clockify" workspace_id = "...my_workspace_id..." } - name = "Guy Beier" + name = "Angela Schaefer" secret_id = "...my_secret_id..." - workspace_id = "2ec09ff8-f0f8-416f-b347-7c13e902c141" + workspace_id = "76ffb901-c6ec-4bb4-a243-cf789ffafeda" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_close_com/resource.tf b/examples/resources/airbyte_source_close_com/resource.tf index 2a9b122cc..c353f6aa5 100755 --- a/examples/resources/airbyte_source_close_com/resource.tf +++ b/examples/resources/airbyte_source_close_com/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_close_com" "my_source_closecom" { source_type = "close-com" start_date = "2021-01-01" } - name = "Robert Muller MD" + name = "Ronnie Nikolaus" secret_id = "...my_secret_id..." - workspace_id = "668151a4-72af-4923-8594-9f83f350cf87" + workspace_id = "e0ac184c-2b9c-4247-8883-73a40e1942f3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_coda/resource.tf b/examples/resources/airbyte_source_coda/resource.tf index d45ef731f..ad3f1aa2b 100755 --- a/examples/resources/airbyte_source_coda/resource.tf +++ b/examples/resources/airbyte_source_coda/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_coda" "my_source_coda" { auth_token = "...my_auth_token..." source_type = "coda" } - name = "Nettie Wilkinson" + name = "Lila Harris II" secret_id = "...my_secret_id..." - workspace_id = "01c6ecbb-4e24-43cf-b89f-fafeda53e5ae" + workspace_id = "5756f5d5-6d0b-4d0a-b2df-e13db4f62cba" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_coin_api/resource.tf b/examples/resources/airbyte_source_coin_api/resource.tf index 8c208a1f7..20698fb64 100755 --- a/examples/resources/airbyte_source_coin_api/resource.tf +++ b/examples/resources/airbyte_source_coin_api/resource.tf @@ -4,12 +4,12 @@ resource "airbyte_source_coin_api" "my_source_coinapi" { end_date = "2019-01-01T00:00:00" environment = "sandbox" limit = 10 - period = "5SEC" + period = "2MTH" source_type = "coin-api" start_date = "2019-01-01T00:00:00" symbol_id = "...my_symbol_id..." } - name = "Myron Boyle" + name = "Francis Boyle" secret_id = "...my_secret_id..." 
- workspace_id = "c2b9c247-c883-473a-80e1-942f32e55055" + workspace_id = "bc0b80a6-924d-43b2-acfc-c8f895010f5d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_coinmarketcap/resource.tf b/examples/resources/airbyte_source_coinmarketcap/resource.tf index 801976ddc..4169d481c 100755 --- a/examples/resources/airbyte_source_coinmarketcap/resource.tf +++ b/examples/resources/airbyte_source_coinmarketcap/resource.tf @@ -1,13 +1,13 @@ resource "airbyte_source_coinmarketcap" "my_source_coinmarketcap" { configuration = { api_key = "...my_api_key..." - data_type = "latest" + data_type = "historical" source_type = "coinmarketcap" symbols = [ "...", ] } - name = "Elsie West" + name = "Meredith Kassulke" secret_id = "...my_secret_id..." - workspace_id = "56d0bd0a-f2df-4e13-9b4f-62cba3f8941a" + workspace_id = "1804e54c-82f1-468a-b63c-8873e484380b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_configcat/resource.tf b/examples/resources/airbyte_source_configcat/resource.tf index 376511b00..1e2c23e89 100755 --- a/examples/resources/airbyte_source_configcat/resource.tf +++ b/examples/resources/airbyte_source_configcat/resource.tf @@ -2,9 +2,9 @@ resource "airbyte_source_configcat" "my_source_configcat" { configuration = { password = "...my_password..." source_type = "configcat" - username = "Shaylee.Pfeffer71" + username = "Art_Wiegand" } - name = "Timothy Nolan" + name = "Lowell Oberbrunner" secret_id = "...my_secret_id..." - workspace_id = "24d3b2ec-fcc8-4f89-9010-f5dd3d6fa180" + workspace_id = "5a60a04c-495c-4c69-9171-b51c1bdb1cf4" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_confluence/resource.tf b/examples/resources/airbyte_source_confluence/resource.tf index 8310f7da3..3234b7a6e 100755 --- a/examples/resources/airbyte_source_confluence/resource.tf +++ b/examples/resources/airbyte_source_confluence/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_confluence" "my_source_confluence" { email = "abc@example.com" source_type = "confluence" } - name = "Ms. Russell Wunsch" + name = "Jody Will" secret_id = "...my_secret_id..." - workspace_id = "a363c887-3e48-4438-8b1f-6b8ca275a60a" + workspace_id = "ccca99bc-7fc0-4b2d-8e10-873e42b006d6" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_convex/resource.tf b/examples/resources/airbyte_source_convex/resource.tf index 68ebd5742..ea4e9c71d 100755 --- a/examples/resources/airbyte_source_convex/resource.tf +++ b/examples/resources/airbyte_source_convex/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_convex" "my_source_convex" { deployment_url = "https://murky-swan-635.convex.cloud" source_type = "convex" } - name = "Jody Gutmann" + name = "Guy Kovacek" secret_id = "...my_secret_id..." - workspace_id = "cc699171-b51c-41bd-b1cf-4b888ebdfc4c" + workspace_id = "a8581a58-208c-454f-afa9-c95f2eac5565" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_datadog/resource.tf b/examples/resources/airbyte_source_datadog/resource.tf deleted file mode 100755 index c8fa55cc3..000000000 --- a/examples/resources/airbyte_source_datadog/resource.tf +++ /dev/null @@ -1,22 +0,0 @@ -resource "airbyte_source_datadog" "my_source_datadog" { - configuration = { - api_key = "...my_api_key..." - application_key = "...my_application_key..." - end_date = "2022-10-01T00:00:00Z" - max_records_per_request = 8 - queries = [ - { - data_source = "rum" - name = "Marshall McClure" - query = "...my_query..." - }, - ] - query = "...my_query..." 
- site = "us5.datadoghq.com" - source_type = "datadog" - start_date = "2022-10-01T00:00:00Z" - } - name = "Wilbert Bashirian" - secret_id = "...my_secret_id..." - workspace_id = "dce10873-e42b-4006-9678-878ba8581a58" -} \ No newline at end of file diff --git a/examples/resources/airbyte_source_datascope/resource.tf b/examples/resources/airbyte_source_datascope/resource.tf index 300616789..e590aad92 100755 --- a/examples/resources/airbyte_source_datascope/resource.tf +++ b/examples/resources/airbyte_source_datascope/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_datascope" "my_source_datascope" { source_type = "datascope" start_date = "dd/mm/YYYY HH:MM" } - name = "Amy Lynch" + name = "Danny Bahringer" secret_id = "...my_secret_id..." - workspace_id = "4fefa9c9-5f2e-4ac5-965d-307cfee81206" + workspace_id = "fee81206-e281-43fa-8a41-c480d3f2132a" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_delighted/resource.tf b/examples/resources/airbyte_source_delighted/resource.tf index e8b1f32dc..17c514f7c 100755 --- a/examples/resources/airbyte_source_delighted/resource.tf +++ b/examples/resources/airbyte_source_delighted/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_delighted" "my_source_delighted" { since = "2022-05-30 04:50:23" source_type = "delighted" } - name = "Peggy Windler" + name = "Sarah Collier" secret_id = "...my_secret_id..." - workspace_id = "a41c480d-3f21-432a-b031-02d514f4cc6f" + workspace_id = "14f4cc6f-18bf-4962-9a6a-4f77a87ee3e4" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_dixa/resource.tf b/examples/resources/airbyte_source_dixa/resource.tf index 91c32260f..80cc2652d 100755 --- a/examples/resources/airbyte_source_dixa/resource.tf +++ b/examples/resources/airbyte_source_dixa/resource.tf @@ -1,11 +1,11 @@ resource "airbyte_source_dixa" "my_source_dixa" { configuration = { api_token = "...my_api_token..." - batch_size = 1 + batch_size = 31 source_type = "dixa" start_date = "YYYY-MM-DD" } - name = "Irvin Marks" + name = "Brittany Cole" secret_id = "...my_secret_id..." - workspace_id = "1a6a4f77-a87e-4e3e-8be7-52c65b34418e" + workspace_id = "5b34418e-3bb9-41c8-9975-e0e8419d8f84" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_dockerhub/resource.tf b/examples/resources/airbyte_source_dockerhub/resource.tf index b37cd405f..7c3860795 100755 --- a/examples/resources/airbyte_source_dockerhub/resource.tf +++ b/examples/resources/airbyte_source_dockerhub/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_dockerhub" "my_source_dockerhub" { docker_username = "airbyte" source_type = "dockerhub" } - name = "Kristy Renner DDS" + name = "Joe Haag" secret_id = "...my_secret_id..." - workspace_id = "8d975e0e-8419-4d8f-84f1-44f3e07edcc4" + workspace_id = "3e07edcc-4aa5-4f3c-abd9-05a972e05672" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_dremio/resource.tf b/examples/resources/airbyte_source_dremio/resource.tf index bfedb78a8..b46396f97 100755 --- a/examples/resources/airbyte_source_dremio/resource.tf +++ b/examples/resources/airbyte_source_dremio/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_dremio" "my_source_dremio" { base_url = "...my_base_url..." source_type = "dremio" } - name = "Doug Hammes" + name = "Aaron Connelly" secret_id = "...my_secret_id..." 
- workspace_id = "cabd905a-972e-4056-b282-27b2d309470b" + workspace_id = "2d309470-bf7a-44fa-87cf-535a6fae54eb" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_dynamodb/resource.tf b/examples/resources/airbyte_source_dynamodb/resource.tf index 0aef99e39..c50483ed3 100755 --- a/examples/resources/airbyte_source_dynamodb/resource.tf +++ b/examples/resources/airbyte_source_dynamodb/resource.tf @@ -2,12 +2,12 @@ resource "airbyte_source_dynamodb" "my_source_dynamodb" { configuration = { access_key_id = "A012345678910EXAMPLE" endpoint = "https://{aws_dynamo_db_url}.com" - region = "us-gov-east-1" + region = "us-gov-west-1" reserved_attribute_names = "name, field_name, field-name" secret_access_key = "a012345678910ABCDEFGH/AbCdEfGhEXAMPLEKEY" source_type = "dynamodb" } - name = "Monique Hackett" + name = "Sandra Rowe Sr." secret_id = "...my_secret_id..." - workspace_id = "87cf535a-6fae-454e-bf60-c321f023b75d" + workspace_id = "f023b75d-2367-4fe1-a0cc-8df79f0a396d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_e2e_test_cloud/resource.tf b/examples/resources/airbyte_source_e2e_test_cloud/resource.tf index 93ef95a61..9c1ae62bb 100755 --- a/examples/resources/airbyte_source_e2e_test_cloud/resource.tf +++ b/examples/resources/airbyte_source_e2e_test_cloud/resource.tf @@ -1,7 +1,7 @@ resource "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" { configuration = { - max_messages = 2 - message_interval_ms = 2 + max_messages = 6 + message_interval_ms = 0 mock_catalog = { source_e2e_test_cloud_mock_catalog_multi_schema = { stream_schemas = "...my_stream_schemas..." @@ -12,7 +12,7 @@ resource "airbyte_source_e2e_test_cloud" "my_source_e2etestcloud" { source_type = "e2e-test-cloud" type = "CONTINUOUS_FEED" } - name = "Miss Johanna Ward DDS" + name = "Gertrude Grant" secret_id = "...my_secret_id..." - workspace_id = "c8df79f0-a396-4d90-8364-b7c15dfbace1" + workspace_id = "c15dfbac-e188-4b1c-8ee2-c8c6ce611fee" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_emailoctopus/resource.tf b/examples/resources/airbyte_source_emailoctopus/resource.tf index 33269ce9b..c745d15ff 100755 --- a/examples/resources/airbyte_source_emailoctopus/resource.tf +++ b/examples/resources/airbyte_source_emailoctopus/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_emailoctopus" "my_source_emailoctopus" { api_key = "...my_api_key..." source_type = "emailoctopus" } - name = "Miss Nelson Robel" + name = "Gregory Satterfield" secret_id = "...my_secret_id..." - workspace_id = "ee2c8c6c-e611-4fee-b1c7-cbdb6eec7437" + workspace_id = "bdb6eec7-4378-4ba2-9317-747dc915ad2c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_exchange_rates/resource.tf b/examples/resources/airbyte_source_exchange_rates/resource.tf index 41e7befec..dd195c09e 100755 --- a/examples/resources/airbyte_source_exchange_rates/resource.tf +++ b/examples/resources/airbyte_source_exchange_rates/resource.tf @@ -2,11 +2,11 @@ resource "airbyte_source_exchange_rates" "my_source_exchangerates" { configuration = { access_key = "...my_access_key..." base = "USD" - ignore_weekends = true + ignore_weekends = false source_type = "exchange-rates" start_date = "YYYY-MM-DD" } - name = "Annie Breitenberg" + name = "Mrs. Leslie Klocko" secret_id = "...my_secret_id..." 
- workspace_id = "47dc915a-d2ca-4f5d-9672-3dc0f5ae2f3a" + workspace_id = "c0f5ae2f-3a6b-4700-8787-56143f5a6c98" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_facebook_marketing/resource.tf b/examples/resources/airbyte_source_facebook_marketing/resource.tf index 768ab192c..9bb332c63 100755 --- a/examples/resources/airbyte_source_facebook_marketing/resource.tf +++ b/examples/resources/airbyte_source_facebook_marketing/resource.tf @@ -2,39 +2,39 @@ resource "airbyte_source_facebook_marketing" "my_source_facebookmarketing" { configuration = { access_token = "...my_access_token..." account_id = "111111111111111" - action_breakdowns_allow_empty = false + action_breakdowns_allow_empty = true client_id = "...my_client_id..." client_secret = "...my_client_secret..." custom_insights = [ { action_breakdowns = [ - "action_target_id", + "action_destination", ] - action_report_time = "impression" + action_report_time = "conversion" breakdowns = [ - "age", + "frequency_value", ] end_date = "2017-01-26T00:00:00Z" fields = [ - "estimated_ad_recall_rate_lower_bound", + "account_name", ] insights_lookback_window = 6 - level = "adset" - name = "Vera Bernhard" + level = "ad" + name = "Jesus Batz" start_date = "2017-01-25T00:00:00Z" - time_increment = 7 + time_increment = 8 }, ] end_date = "2017-01-26T00:00:00Z" fetch_thumbnail_images = false - include_deleted = false - insights_lookback_window = 7 - max_batch_size = 4 + include_deleted = true + insights_lookback_window = 4 + max_batch_size = 7 page_size = 3 source_type = "facebook-marketing" start_date = "2017-01-25T00:00:00Z" } - name = "Dr. Dorothy Lockman" + name = "Ms. Wilbert McGlynn" secret_id = "...my_secret_id..." - workspace_id = "0bcacc6c-bd6b-45f3-ac90-9304f926bad2" + workspace_id = "04f926ba-d255-4381-9b47-4b0ed20e5624" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_facebook_pages/resource.tf b/examples/resources/airbyte_source_facebook_pages/resource.tf index 463349589..cd4d30448 100755 --- a/examples/resources/airbyte_source_facebook_pages/resource.tf +++ b/examples/resources/airbyte_source_facebook_pages/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_facebook_pages" "my_source_facebookpages" { page_id = "...my_page_id..." source_type = "facebook-pages" } - name = "Bernice Donnelly V" + name = "Moses Wuckert" secret_id = "...my_secret_id..." - workspace_id = "b474b0ed-20e5-4624-8fff-639a910abdca" + workspace_id = "39a910ab-dcab-4626-b669-6e1ec00221b3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_faker/resource.tf b/examples/resources/airbyte_source_faker/resource.tf index 7f2fd2379..5f5e8e89b 100755 --- a/examples/resources/airbyte_source_faker/resource.tf +++ b/examples/resources/airbyte_source_faker/resource.tf @@ -1,13 +1,13 @@ resource "airbyte_source_faker" "my_source_faker" { configuration = { always_updated = false - count = 4 - parallelism = 1 - records_per_slice = 4 - seed = 4 + count = 3 + parallelism = 9 + records_per_slice = 5 + seed = 6 source_type = "faker" } - name = "Beth McKenzie" + name = "Delbert Reynolds" secret_id = "...my_secret_id..." 
- workspace_id = "1ec00221-b335-4d89-acb3-ecfda8d0c549" + workspace_id = "cfda8d0c-549e-4f03-8049-78a61fa1cf20" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_fauna/resource.tf b/examples/resources/airbyte_source_fauna/resource.tf index 74d7c0cf4..d5047d592 100755 --- a/examples/resources/airbyte_source_fauna/resource.tf +++ b/examples/resources/airbyte_source_fauna/resource.tf @@ -6,15 +6,15 @@ resource "airbyte_source_fauna" "my_source_fauna" { deletion_mode = "ignore" } } - page_size = 9 + page_size = 4 } domain = "...my_domain..." - port = 10 + port = 5 scheme = "...my_scheme..." secret = "...my_secret..." source_type = "fauna" } - name = "Mrs. Edna Abbott" + name = "Irvin Klein" secret_id = "...my_secret_id..." - workspace_id = "78a61fa1-cf20-4688-b77c-1ffc71dca163" + workspace_id = "1ffc71dc-a163-4f2a-bc80-a97ff334cddf" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_file_secure/resource.tf b/examples/resources/airbyte_source_file_secure/resource.tf index 2bc3a67be..3a1bb2009 100755 --- a/examples/resources/airbyte_source_file_secure/resource.tf +++ b/examples/resources/airbyte_source_file_secure/resource.tf @@ -1,7 +1,7 @@ resource "airbyte_source_file_secure" "my_source_filesecure" { configuration = { dataset_name = "...my_dataset_name..." - format = "yaml" + format = "excel_binary" provider = { source_file_secure_storage_provider_az_blob_azure_blob_storage = { sas_token = "...my_sas_token..." @@ -10,11 +10,11 @@ resource "airbyte_source_file_secure" "my_source_filesecure" { storage_account = "...my_storage_account..." } } - reader_options = "{}" + reader_options = "{\"sep\": \" \"}" source_type = "file-secure" url = "gs://my-google-bucket/data.csv" } - name = "Miss Sheri Legros" + name = "Freddie Von V" secret_id = "...my_secret_id..." - workspace_id = "7ff334cd-df85-47a9-a618-76c6ab21d29d" + workspace_id = "76c6ab21-d29d-4fc9-8d6f-ecd799390066" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_firebolt/resource.tf b/examples/resources/airbyte_source_firebolt/resource.tf index 95a416fa2..5c5ed6ec8 100755 --- a/examples/resources/airbyte_source_firebolt/resource.tf +++ b/examples/resources/airbyte_source_firebolt/resource.tf @@ -8,7 +8,7 @@ resource "airbyte_source_firebolt" "my_source_firebolt" { source_type = "firebolt" username = "username@email.com" } - name = "Hector Yundt" + name = "Donna Abshire" secret_id = "...my_secret_id..." - workspace_id = "d7993900-66a6-4d2d-8003-55338cec086f" + workspace_id = "5338cec0-86fa-421e-9152-cb3119167b8e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_freshcaller/resource.tf b/examples/resources/airbyte_source_freshcaller/resource.tf index 40fe6b8db..ffed3f24c 100755 --- a/examples/resources/airbyte_source_freshcaller/resource.tf +++ b/examples/resources/airbyte_source_freshcaller/resource.tf @@ -2,12 +2,12 @@ resource "airbyte_source_freshcaller" "my_source_freshcaller" { configuration = { api_key = "...my_api_key..." domain = "snaptravel" - requests_per_minute = 7 + requests_per_minute = 2 source_type = "freshcaller" start_date = "2022-01-01T12:00:00Z" sync_lag_minutes = 9 } - name = "Keith Hills" + name = "Kenneth Friesen IV" secret_id = "...my_secret_id..." 
- workspace_id = "b3119167-b8e3-4c8d-b034-08d6d364ffd4" + workspace_id = "d6d364ff-d455-4906-9126-3d48e935c2c9" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_freshdesk/resource.tf b/examples/resources/airbyte_source_freshdesk/resource.tf index 6c32d0232..150e6b282 100755 --- a/examples/resources/airbyte_source_freshdesk/resource.tf +++ b/examples/resources/airbyte_source_freshdesk/resource.tf @@ -2,11 +2,11 @@ resource "airbyte_source_freshdesk" "my_source_freshdesk" { configuration = { api_key = "...my_api_key..." domain = "myaccount.freshdesk.com" - requests_per_minute = 3 + requests_per_minute = 10 source_type = "freshdesk" start_date = "2020-12-01T00:00:00Z" } - name = "Mr. Tara Sporer" + name = "Dale Altenwerth" secret_id = "...my_secret_id..." - workspace_id = "3d48e935-c2c9-4e81-b30b-e3e43202d721" + workspace_id = "3e43202d-7216-4576-9066-41870d9d21f9" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_freshsales/resource.tf b/examples/resources/airbyte_source_freshsales/resource.tf index 9d4bab1ae..c3410eb5d 100755 --- a/examples/resources/airbyte_source_freshsales/resource.tf +++ b/examples/resources/airbyte_source_freshsales/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_freshsales" "my_source_freshsales" { domain_name = "mydomain.myfreshworks.com" source_type = "freshsales" } - name = "Geraldine Kling" + name = "Gustavo Adams DDS" secret_id = "...my_secret_id..." - workspace_id = "06641870-d9d2-41f9-ad03-0c4ecc11a083" + workspace_id = "4ecc11a0-8364-4290-a8b8-502a55e7f73b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_gainsight_px/resource.tf b/examples/resources/airbyte_source_gainsight_px/resource.tf index c4fffe157..6eb739429 100755 --- a/examples/resources/airbyte_source_gainsight_px/resource.tf +++ b/examples/resources/airbyte_source_gainsight_px/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_gainsight_px" "my_source_gainsightpx" { api_key = "...my_api_key..." source_type = "gainsight-px" } - name = "Anita Dare III" + name = "Hugh Goodwin" secret_id = "...my_secret_id..." - workspace_id = "8b8502a5-5e7f-473b-8845-e320a319f4ba" + workspace_id = "320a319f-4bad-4f94-bc9a-867bc4242666" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_gcs/resource.tf b/examples/resources/airbyte_source_gcs/resource.tf index 8e82e7034..cb82ef849 100755 --- a/examples/resources/airbyte_source_gcs/resource.tf +++ b/examples/resources/airbyte_source_gcs/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_gcs" "my_source_gcs" { service_account = "{ \"type\": \"service_account\", \"project_id\": YOUR_PROJECT_ID, \"private_key_id\": YOUR_PRIVATE_KEY, ... }" source_type = "gcs" } - name = "Darin Mante" + name = "Olga Blanda" secret_id = "...my_secret_id..." - workspace_id = "c9a867bc-4242-4666-9816-ddca8ef51fcb" + workspace_id = "dca8ef51-fcb4-4c59-bec1-2cdaad0ec7af" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_getlago/resource.tf b/examples/resources/airbyte_source_getlago/resource.tf index d19e14bd9..0904859da 100755 --- a/examples/resources/airbyte_source_getlago/resource.tf +++ b/examples/resources/airbyte_source_getlago/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_getlago" "my_source_getlago" { api_key = "...my_api_key..." source_type = "getlago" } - name = "Myra Hills" + name = "Irving Rohan" secret_id = "...my_secret_id..." 
- workspace_id = "ec12cdaa-d0ec-47af-adbd-80df448a47f9" + workspace_id = "0df448a4-7f93-490c-9888-0983dabf9ef3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_github/resource.tf b/examples/resources/airbyte_source_github/resource.tf index 381e86053..02416c44e 100755 --- a/examples/resources/airbyte_source_github/resource.tf +++ b/examples/resources/airbyte_source_github/resource.tf @@ -9,12 +9,12 @@ resource "airbyte_source_github" "my_source_github" { option_title = "OAuth Credentials" } } - repository = "airbytehq/airbyte airbytehq/another-repo" - requests_per_hour = 6 + repository = "airbytehq/airbyte" + requests_per_hour = 10 source_type = "github" start_date = "2021-03-01T00:00:00Z" } - name = "Brandy Kuvalis V" + name = "Van Kuhlman IV" secret_id = "...my_secret_id..." - workspace_id = "83dabf9e-f3ff-4dd9-b7f0-79af4d35724c" + workspace_id = "9af4d357-24cd-4b0f-8d28-1187d56844ed" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_gitlab/resource.tf b/examples/resources/airbyte_source_gitlab/resource.tf index 0ce500308..c2a93e727 100755 --- a/examples/resources/airbyte_source_gitlab/resource.tf +++ b/examples/resources/airbyte_source_gitlab/resource.tf @@ -8,7 +8,7 @@ resource "airbyte_source_gitlab" "my_source_gitlab" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2022-11-19T07:42:45.478Z" + token_expiry_date = "2021-06-26T03:36:42.239Z" } } groups = "airbyte.io" @@ -16,7 +16,7 @@ resource "airbyte_source_gitlab" "my_source_gitlab" { source_type = "gitlab" start_date = "2021-03-01T00:00:00Z" } - name = "Mr. Jesse Luettgen" + name = "Frank Keeling" secret_id = "...my_secret_id..." - workspace_id = "7d56844e-ded8-45a9-865e-628bdfc2032b" + workspace_id = "628bdfc2-032b-46c8-b992-3b7e13584f7a" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_glassfrog/resource.tf b/examples/resources/airbyte_source_glassfrog/resource.tf index 40e39e10a..919fbfcdd 100755 --- a/examples/resources/airbyte_source_glassfrog/resource.tf +++ b/examples/resources/airbyte_source_glassfrog/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_glassfrog" "my_source_glassfrog" { api_key = "...my_api_key..." source_type = "glassfrog" } - name = "Angelica Langworth" + name = "Carl Davis" secret_id = "...my_secret_id..." - workspace_id = "923b7e13-584f-47ae-92c6-891f82ce1157" + workspace_id = "891f82ce-1157-4172-b053-77dcfa89df97" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_gnews/resource.tf b/examples/resources/airbyte_source_gnews/resource.tf index 7c1d69447..cf35bf01a 100755 --- a/examples/resources/airbyte_source_gnews/resource.tf +++ b/examples/resources/airbyte_source_gnews/resource.tf @@ -1,23 +1,23 @@ resource "airbyte_source_gnews" "my_source_gnews" { configuration = { api_key = "...my_api_key..." - country = "ca" + country = "ie" end_date = "2022-08-21 16:27:09" in = [ - "description", + "content", ] - language = "en" + language = "fr" nullable = [ - "title", + "description", ] - query = "Microsoft Windows 10" + query = "Apple AND NOT iPhone" sortby = "publishedAt" source_type = "gnews" start_date = "2022-08-21 16:27:09" - top_headlines_query = "Apple OR Microsoft" - top_headlines_topic = "technology" + top_headlines_query = "Apple AND NOT iPhone" + top_headlines_topic = "business" } - name = "Mercedes Schiller" + name = "Katrina Considine" secret_id = "...my_secret_id..." 
- workspace_id = "89df975e-3566-4860-92e9-c3ddc5f111de" + workspace_id = "c3ddc5f1-11de-4a10-a6d5-41a4d190feb2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_ads/resource.tf b/examples/resources/airbyte_source_google_ads/resource.tf index 6ced56af7..325505c0b 100755 --- a/examples/resources/airbyte_source_google_ads/resource.tf +++ b/examples/resources/airbyte_source_google_ads/resource.tf @@ -20,7 +20,7 @@ resource "airbyte_source_google_ads" "my_source_googleads" { source_type = "google-ads" start_date = "2017-01-25" } - name = "Miss Lloyd Funk" + name = "Dr. Forrest Roob" secret_id = "...my_secret_id..." - workspace_id = "d190feb2-1780-4bcc-80db-bddb484708fb" + workspace_id = "bddb4847-08fb-44e3-91e6-bc158c4c4e54" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_analytics_data_api/resource.tf b/examples/resources/airbyte_source_google_analytics_data_api/resource.tf index 73c318477..817811e0b 100755 --- a/examples/resources/airbyte_source_google_analytics_data_api/resource.tf +++ b/examples/resources/airbyte_source_google_analytics_data_api/resource.tf @@ -11,11 +11,11 @@ resource "airbyte_source_google_analytics_data_api" "my_source_googleanalyticsda } custom_reports = "...my_custom_reports..." date_ranges_start_date = "2021-01-01" - property_id = "1738294" + property_id = "5729978930" source_type = "google-analytics-data-api" - window_in_days = 120 + window_in_days = 364 } - name = "Duane Prohaska II" + name = "Juanita Collier" secret_id = "...my_secret_id..." - workspace_id = "8c4c4e54-599e-4a34-a260-e9b200ce78a1" + workspace_id = "0e9b200c-e78a-41bd-8fb7-a0a116ce723d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_analytics_v4/resource.tf b/examples/resources/airbyte_source_google_analytics_v4/resource.tf index 3da684145..013f62889 100755 --- a/examples/resources/airbyte_source_google_analytics_v4/resource.tf +++ b/examples/resources/airbyte_source_google_analytics_v4/resource.tf @@ -15,7 +15,7 @@ resource "airbyte_source_google_analytics_v4" "my_source_googleanalyticsv4" { view_id = "...my_view_id..." window_in_days = 120 } - name = "Miss Harvey Nicolas Sr." + name = "Dr. Doug Dibbert" secret_id = "...my_secret_id..." - workspace_id = "6ce723d4-097f-4a30-a9af-725b29122030" + workspace_id = "af725b29-1220-430d-83f5-aeb7799d22e8" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_directory/resource.tf b/examples/resources/airbyte_source_google_directory/resource.tf index efb4c6232..8e6f6a292 100755 --- a/examples/resources/airbyte_source_google_directory/resource.tf +++ b/examples/resources/airbyte_source_google_directory/resource.tf @@ -4,12 +4,12 @@ resource "airbyte_source_google_directory" "my_source_googledirectory" { source_google_directory_google_credentials_service_account_key = { credentials_json = "...my_credentials_json..." credentials_title = "Service accounts" - email = "Juvenal.Frami64@hotmail.com" + email = "Ayla.Zulauf@hotmail.com" } } source_type = "google-directory" } - name = "Kelvin Kshlerin" + name = "Mrs. Allen Lockman" secret_id = "...my_secret_id..." 
- workspace_id = "9d22e8c1-f849-4382-9fdc-42c876c2c2df" + workspace_id = "dc42c876-c2c2-4dfb-8cfc-1c76230f841f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf b/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf index a08d6f9f8..fc20bedc4 100755 --- a/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf +++ b/examples/resources/airbyte_source_google_pagespeed_insights/resource.tf @@ -12,7 +12,7 @@ resource "airbyte_source_google_pagespeed_insights" "my_source_googlepagespeedin "...", ] } - name = "Miss Terrell Satterfield" + name = "Kristopher Dare" secret_id = "...my_secret_id..." - workspace_id = "6230f841-fb1b-4d23-bdb1-4db6be5a6859" + workspace_id = "db14db6b-e5a6-4859-98e2-2ae20da16fc2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_search_console/resource.tf b/examples/resources/airbyte_source_google_search_console/resource.tf index fda22b8fa..c4f5686bf 100755 --- a/examples/resources/airbyte_source_google_search_console/resource.tf +++ b/examples/resources/airbyte_source_google_search_console/resource.tf @@ -10,15 +10,23 @@ resource "airbyte_source_google_search_console" "my_source_googlesearchconsole" } } custom_reports = "...my_custom_reports..." - data_state = "final" - end_date = "2021-12-12" + custom_reports_array = [ + { + dimensions = [ + "page", + ] + name = "Heidi Bernier" + }, + ] + data_state = "all" + end_date = "2021-12-12" site_urls = [ "...", ] source_type = "google-search-console" - start_date = "2021-01-01" + start_date = "2022-07-11" } - name = "Janie Swift PhD" + name = "Jordan Hilll" secret_id = "...my_secret_id..." - workspace_id = "a16fc2b2-71a2-489c-97e8-54e90439d222" + workspace_id = "90439d22-2465-4694-a240-7084f7ab37ce" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_sheets/resource.tf b/examples/resources/airbyte_source_google_sheets/resource.tf index ffa21e0f7..0cf171cfe 100755 --- a/examples/resources/airbyte_source_google_sheets/resource.tf +++ b/examples/resources/airbyte_source_google_sheets/resource.tf @@ -9,11 +9,10 @@ resource "airbyte_source_google_sheets" "my_source_googlesheets" { } } names_conversion = true - row_batch_size = 100 source_type = "google-sheets" spreadsheet_id = "https://docs.google.com/spreadsheets/d/1hLd9Qqti3UyLXZB2aFfUWDT7BG-arw2xy4HR3D-dwUb/edit" } - name = "Mae Gleichner" + name = "Irene Davis" secret_id = "...my_secret_id..." - workspace_id = "407084f7-ab37-4cef-8222-5194db55410a" + workspace_id = "194db554-10ad-4c66-9af9-0a26c7cdc981" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_webfonts/resource.tf b/examples/resources/airbyte_source_google_webfonts/resource.tf index d49507686..aa77478b4 100755 --- a/examples/resources/airbyte_source_google_webfonts/resource.tf +++ b/examples/resources/airbyte_source_google_webfonts/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_google_webfonts" "my_source_googlewebfonts" { sort = "...my_sort..." source_type = "google-webfonts" } - name = "Garrett Hoeger" + name = "Donald Hyatt" secret_id = "...my_secret_id..." 
- workspace_id = "af90a26c-7cdc-4981-b068-981d6bb33cfa" + workspace_id = "81d6bb33-cfaa-4348-831b-f407ee4fcf0c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf b/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf index 07f2e5019..68d8d1824 100755 --- a/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf +++ b/examples/resources/airbyte_source_google_workspace_admin_reports/resource.tf @@ -1,11 +1,11 @@ resource "airbyte_source_google_workspace_admin_reports" "my_source_googleworkspaceadminreports" { configuration = { credentials_json = "...my_credentials_json..." - email = "Daron_Green@hotmail.com" - lookback = 2 + email = "Bridgette_Rohan@gmail.com" + lookback = 10 source_type = "google-workspace-admin-reports" } - name = "Antoinette Wolf IV" + name = "Samantha Huels" secret_id = "...my_secret_id..." - workspace_id = "ee4fcf0c-42b7-48f1-9626-398a0dc76632" + workspace_id = "398a0dc7-6632-44cc-b06c-8ca12d025292" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_greenhouse/resource.tf b/examples/resources/airbyte_source_greenhouse/resource.tf index 70b383883..6d269f662 100755 --- a/examples/resources/airbyte_source_greenhouse/resource.tf +++ b/examples/resources/airbyte_source_greenhouse/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_greenhouse" "my_source_greenhouse" { api_key = "...my_api_key..." source_type = "greenhouse" } - name = "Kendra Schmitt III" + name = "Patricia Pouros" secret_id = "...my_secret_id..." - workspace_id = "c8ca12d0-2529-4270-b8d5-722dd895b8bc" + workspace_id = "5722dd89-5b8b-4cf2-8db9-59693352f745" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_gridly/resource.tf b/examples/resources/airbyte_source_gridly/resource.tf index fd642821a..688646912 100755 --- a/examples/resources/airbyte_source_gridly/resource.tf +++ b/examples/resources/airbyte_source_gridly/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_gridly" "my_source_gridly" { grid_id = "...my_grid_id..." source_type = "gridly" } - name = "Ernest Grimes" + name = "Josephine McCullough" secret_id = "...my_secret_id..." - workspace_id = "95969335-2f74-4533-994d-78de3b6e9389" + workspace_id = "d78de3b6-e938-49f5-abb7-f662550a2838" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_harvest/resource.tf b/examples/resources/airbyte_source_harvest/resource.tf index 58e0b1a54..d86bbd45a 100755 --- a/examples/resources/airbyte_source_harvest/resource.tf +++ b/examples/resources/airbyte_source_harvest/resource.tf @@ -13,7 +13,7 @@ resource "airbyte_source_harvest" "my_source_harvest" { replication_start_date = "2017-01-25T00:00:00Z" source_type = "harvest" } - name = "Harvey Wisoky" + name = "Rodney Orn" secret_id = "...my_secret_id..." - workspace_id = "2550a283-82ac-4483-afd2-315bba650164" + workspace_id = "2315bba6-5016-44e0-af5b-f6ae591bc8bd" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_hubplanner/resource.tf b/examples/resources/airbyte_source_hubplanner/resource.tf index 94f135f83..a1b471fa7 100755 --- a/examples/resources/airbyte_source_hubplanner/resource.tf +++ b/examples/resources/airbyte_source_hubplanner/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_hubplanner" "my_source_hubplanner" { api_key = "...my_api_key..." source_type = "hubplanner" } - name = "Scott Jast" + name = "Cary Emmerich Sr." secret_id = "...my_secret_id..." 
- workspace_id = "bf6ae591-bc8b-4def-b612-b63c205fda84" + workspace_id = "b63c205f-da84-4077-8a68-a9a35d086b6f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_hubspot/resource.tf b/examples/resources/airbyte_source_hubspot/resource.tf index 361e6f7de..4eaa75fe6 100755 --- a/examples/resources/airbyte_source_hubspot/resource.tf +++ b/examples/resources/airbyte_source_hubspot/resource.tf @@ -11,7 +11,7 @@ resource "airbyte_source_hubspot" "my_source_hubspot" { source_type = "hubspot" start_date = "2017-01-25T00:00:00Z" } - name = "Pauline Paucek" + name = "Mr. Tomas Wisozk DVM" secret_id = "...my_secret_id..." - workspace_id = "a9a35d08-6b6f-466f-af02-0e9f443b4257" + workspace_id = "9f443b42-57b9-492c-8dbd-a6a61efa2198" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_insightly/resource.tf b/examples/resources/airbyte_source_insightly/resource.tf index d6871f115..582ee7125 100755 --- a/examples/resources/airbyte_source_insightly/resource.tf +++ b/examples/resources/airbyte_source_insightly/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_insightly" "my_source_insightly" { start_date = "2021-03-01T00:00:00Z" token = "...my_token..." } - name = "Arturo Marks" + name = "Dana Lindgren" secret_id = "...my_secret_id..." - workspace_id = "8dbda6a6-1efa-4219-8258-fd0a9eba47f7" + workspace_id = "0a9eba47-f7d3-4ef0-8964-0d6a1831c87a" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_instagram/resource.tf b/examples/resources/airbyte_source_instagram/resource.tf index 81b76a21d..53a8e0460 100755 --- a/examples/resources/airbyte_source_instagram/resource.tf +++ b/examples/resources/airbyte_source_instagram/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_instagram" "my_source_instagram" { source_type = "instagram" start_date = "2017-01-25T00:00:00Z" } - name = "Randal Aufderhar" + name = "Mae Hoppe" secret_id = "...my_secret_id..." - workspace_id = "640d6a18-31c8-47ad-b596-fdf1ad837ae8" + workspace_id = "f1ad837a-e80c-41c1-9c95-ba998678fa3f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_instatus/resource.tf b/examples/resources/airbyte_source_instatus/resource.tf index 56c05e515..23ad265ef 100755 --- a/examples/resources/airbyte_source_instatus/resource.tf +++ b/examples/resources/airbyte_source_instatus/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_instatus" "my_source_instatus" { api_key = "...my_api_key..." source_type = "instatus" } - name = "Rosalie Bruen V" + name = "Bobbie Johnston" secret_id = "...my_secret_id..." - workspace_id = "c95ba998-678f-4a3f-a969-91af388ce036" + workspace_id = "1af388ce-0361-4444-8c79-77a0ef2f5360" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_intercom/resource.tf b/examples/resources/airbyte_source_intercom/resource.tf index e2157ff76..9ebb6836e 100755 --- a/examples/resources/airbyte_source_intercom/resource.tf +++ b/examples/resources/airbyte_source_intercom/resource.tf @@ -1,10 +1,12 @@ resource "airbyte_source_intercom" "my_source_intercom" { configuration = { - access_token = "...my_access_token..." - source_type = "intercom" - start_date = "2020-11-16T00:00:00Z" + access_token = "...my_access_token..." + client_id = "...my_client_id..." + client_secret = "...my_client_secret..." + source_type = "intercom" + start_date = "2020-11-16T00:00:00Z" } - name = "Emma Lueilwitz" + name = "Darnell Watsica" secret_id = "...my_secret_id..." 
- workspace_id = "977a0ef2-f536-4028-afee-f934152ed7e2" + workspace_id = "934152ed-7e25-43f4-8157-deaa7170f445" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_ip2whois/resource.tf b/examples/resources/airbyte_source_ip2whois/resource.tf index 60efc208c..0dbb1423d 100755 --- a/examples/resources/airbyte_source_ip2whois/resource.tf +++ b/examples/resources/airbyte_source_ip2whois/resource.tf @@ -1,10 +1,10 @@ resource "airbyte_source_ip2whois" "my_source_ip2whois" { configuration = { api_key = "...my_api_key..." - domain = "www.google.com" + domain = "www.facebook.com" source_type = "ip2whois" } - name = "Lucia Gorczany II" + name = "Leland Wisoky" secret_id = "...my_secret_id..." - workspace_id = "7deaa717-0f44-45ac-8f66-7aaf9bbad185" + workspace_id = "7aaf9bba-d185-4fe4-b1d6-bf5c838fbb8c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_iterable/resource.tf b/examples/resources/airbyte_source_iterable/resource.tf index 24403f403..6482f729a 100755 --- a/examples/resources/airbyte_source_iterable/resource.tf +++ b/examples/resources/airbyte_source_iterable/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_iterable" "my_source_iterable" { source_type = "iterable" start_date = "2021-04-01T00:00:00Z" } - name = "Peggy Bergstrom" + name = "Archie Jaskolski" secret_id = "...my_secret_id..." - workspace_id = "bf5c838f-bb8c-420c-b67f-c4b425e99e62" + workspace_id = "c4b425e9-9e62-434c-9f7b-79dfeb77a5c3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_jira/resource.tf b/examples/resources/airbyte_source_jira/resource.tf index 3765879ab..a8ae76693 100755 --- a/examples/resources/airbyte_source_jira/resource.tf +++ b/examples/resources/airbyte_source_jira/resource.tf @@ -1,10 +1,10 @@ resource "airbyte_source_jira" "my_source_jira" { configuration = { api_token = "...my_api_token..." - domain = ".atlassian.net" - email = "Ottilie.McCullough73@gmail.com" + domain = ".jira.com" + email = "Eldridge_Reichert@hotmail.com" enable_experimental_streams = false - expand_issue_changelog = true + expand_issue_changelog = false projects = [ "...", ] @@ -12,7 +12,7 @@ resource "airbyte_source_jira" "my_source_jira" { source_type = "jira" start_date = "2021-03-01T00:00:00Z" } - name = "Mathew Klocko" + name = "Olive Windler" secret_id = "...my_secret_id..." - workspace_id = "c38d4baf-91e5-406e-b890-a54b475f16f5" + workspace_id = "0a54b475-f16f-456d-b85a-3c4ac631b99e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_k6_cloud/resource.tf b/examples/resources/airbyte_source_k6_cloud/resource.tf index f05141b8e..0e9669348 100755 --- a/examples/resources/airbyte_source_k6_cloud/resource.tf +++ b/examples/resources/airbyte_source_k6_cloud/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_k6_cloud" "my_source_k6cloud" { api_token = "...my_api_token..." source_type = "k6-cloud" } - name = "Marcella Dooley" + name = "Ella Runolfsdottir" secret_id = "...my_secret_id..." - workspace_id = "a3c4ac63-1b99-4e26-8ed8-f9fdb9410f63" + workspace_id = "8f9fdb94-10f6-43bb-b817-837b01afdd78" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_klarna/resource.tf b/examples/resources/airbyte_source_klarna/resource.tf index e4833c057..512bb119f 100755 --- a/examples/resources/airbyte_source_klarna/resource.tf +++ b/examples/resources/airbyte_source_klarna/resource.tf @@ -1,12 +1,12 @@ resource "airbyte_source_klarna" "my_source_klarna" { configuration = { password = "...my_password..." 
- playground = false - region = "oc" + playground = true + region = "us" source_type = "klarna" - username = "Tristin47" + username = "Chase50" } - name = "Jacob Krajcik Sr." + name = "Caleb Rau" secret_id = "...my_secret_id..." - workspace_id = "afdd7886-2418-49eb-8487-3f5033f19dbf" + workspace_id = "873f5033-f19d-4bf1-a5ce-4152eab9cd7e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_klaviyo/resource.tf b/examples/resources/airbyte_source_klaviyo/resource.tf index 6e664a8cd..1367f888c 100755 --- a/examples/resources/airbyte_source_klaviyo/resource.tf +++ b/examples/resources/airbyte_source_klaviyo/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_klaviyo" "my_source_klaviyo" { source_type = "klaviyo" start_date = "2017-01-25T00:00:00Z" } - name = "Della Trantow II" + name = "Charlotte Muller" secret_id = "...my_secret_id..." - workspace_id = "2eab9cd7-e522-44a6-a0e1-23b7847ec59e" + workspace_id = "0e123b78-47ec-459e-9f67-f3c4cce4b6d7" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_kustomer_singer/resource.tf b/examples/resources/airbyte_source_kustomer_singer/resource.tf index cfd0cef54..c4813b3f9 100755 --- a/examples/resources/airbyte_source_kustomer_singer/resource.tf +++ b/examples/resources/airbyte_source_kustomer_singer/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_kustomer_singer" "my_source_kustomersinger" { source_type = "kustomer-singer" start_date = "2019-01-01T00:00:00Z" } - name = "Camille Johnston" + name = "Bobbie Jacobs" secret_id = "...my_secret_id..." - workspace_id = "3c4cce4b-6d76-496f-b3c5-747501357e44" + workspace_id = "3c574750-1357-4e44-b51f-8b084c3197e1" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_kyve/resource.tf b/examples/resources/airbyte_source_kyve/resource.tf index 0f992ee18..f9f9e84a0 100755 --- a/examples/resources/airbyte_source_kyve/resource.tf +++ b/examples/resources/airbyte_source_kyve/resource.tf @@ -1,13 +1,13 @@ resource "airbyte_source_kyve" "my_source_kyve" { configuration = { - max_pages = 10 - page_size = 4 - pool_ids = "0" + max_pages = 6 + page_size = 2 + pool_ids = "0,1" source_type = "kyve" - start_ids = "0,0" - url_base = "https://api.beta.kyve.network/" + start_ids = "0" + url_base = "https://api.korellia.kyve.network/" } - name = "William Larson" + name = "Gail Homenick" secret_id = "...my_secret_id..." - workspace_id = "3197e193-a245-4467-b948-74c2d5cc4972" + workspace_id = "94874c2d-5cc4-4972-a33e-66bd8fe5d00b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_launchdarkly/resource.tf b/examples/resources/airbyte_source_launchdarkly/resource.tf index 8a5c03dbd..c6a3e6f9a 100755 --- a/examples/resources/airbyte_source_launchdarkly/resource.tf +++ b/examples/resources/airbyte_source_launchdarkly/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_launchdarkly" "my_source_launchdarkly" { access_token = "...my_access_token..." source_type = "launchdarkly" } - name = "Edith Dickens" + name = "Darren Monahan" secret_id = "...my_secret_id..." 
- workspace_id = "6bd8fe5d-00b9-479e-b203-87320590ccc1" + workspace_id = "20387320-590c-4cc1-8964-00313b3e5044" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_lemlist/resource.tf b/examples/resources/airbyte_source_lemlist/resource.tf index fc906cbe6..f30eccca4 100755 --- a/examples/resources/airbyte_source_lemlist/resource.tf +++ b/examples/resources/airbyte_source_lemlist/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_lemlist" "my_source_lemlist" { api_key = "...my_api_key..." source_type = "lemlist" } - name = "Erika Jaskolski Jr." + name = "Gene Herman" secret_id = "...my_secret_id..." - workspace_id = "313b3e50-44f6-45fe-b2dc-4077d0cc3f40" + workspace_id = "72dc4077-d0cc-43f4-88ef-c15ceb4d6e1e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_lever_hiring/resource.tf b/examples/resources/airbyte_source_lever_hiring/resource.tf index 93f7c54f8..62eb79581 100755 --- a/examples/resources/airbyte_source_lever_hiring/resource.tf +++ b/examples/resources/airbyte_source_lever_hiring/resource.tf @@ -10,7 +10,7 @@ resource "airbyte_source_lever_hiring" "my_source_leverhiring" { source_type = "lever-hiring" start_date = "2021-03-01T00:00:00Z" } - name = "Mrs. Amos Ryan" + name = "Donald Wuckert" secret_id = "...my_secret_id..." - workspace_id = "eb4d6e1e-ae0f-475a-adf2-acab58b991c9" + workspace_id = "aedf2aca-b58b-4991-8926-ddb589461e74" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_linkedin_ads/resource.tf b/examples/resources/airbyte_source_linkedin_ads/resource.tf index c11523058..cf38a588a 100755 --- a/examples/resources/airbyte_source_linkedin_ads/resource.tf +++ b/examples/resources/airbyte_source_linkedin_ads/resource.tf @@ -5,8 +5,8 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" { ] ad_analytics_reports = [ { - name = "Mable Stroman" - pivot_by = "MEMBER_COMPANY_SIZE" + name = "Kara Rohan" + pivot_by = "MEMBER_REGION_V2" time_granularity = "MONTHLY" }, ] @@ -19,7 +19,7 @@ resource "airbyte_source_linkedin_ads" "my_source_linkedinads" { source_type = "linkedin-ads" start_date = "2021-05-17" } - name = "Leigh Kuhic" + name = "Elsa Adams" secret_id = "...my_secret_id..." - workspace_id = "1cbe6d95-02f0-4ea9-b0b6-9f7ac2f72f88" + workspace_id = "930b69f7-ac2f-472f-8850-090491160820" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_linkedin_pages/resource.tf b/examples/resources/airbyte_source_linkedin_pages/resource.tf index 9d942ead1..60a09bbc6 100755 --- a/examples/resources/airbyte_source_linkedin_pages/resource.tf +++ b/examples/resources/airbyte_source_linkedin_pages/resource.tf @@ -9,7 +9,7 @@ resource "airbyte_source_linkedin_pages" "my_source_linkedinpages" { org_id = "123456789" source_type = "linkedin-pages" } - name = "Karen Barrows I" + name = "Tracey Kutch" secret_id = "...my_secret_id..." - workspace_id = "91160820-7888-4ec6-a183-bfe9659eb40e" + workspace_id = "c66183bf-e965-49eb-80ec-16faf75b0b53" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_linnworks/resource.tf b/examples/resources/airbyte_source_linnworks/resource.tf index f786ff86a..72d488706 100755 --- a/examples/resources/airbyte_source_linnworks/resource.tf +++ b/examples/resources/airbyte_source_linnworks/resource.tf @@ -3,10 +3,10 @@ resource "airbyte_source_linnworks" "my_source_linnworks" { application_id = "...my_application_id..." application_secret = "...my_application_secret..." 
source_type = "linnworks" - start_date = "2022-08-21T08:36:18.969Z" + start_date = "2022-05-04T07:21:12.859Z" token = "...my_token..." } - name = "Lana Osinski" + name = "Antonia Muller" secret_id = "...my_secret_id..." - workspace_id = "5b0b532a-4da3-47cb-aaf4-452c4842c9b2" + workspace_id = "cbaaf445-2c48-442c-9b2a-d32dafe81a88" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_lokalise/resource.tf b/examples/resources/airbyte_source_lokalise/resource.tf index 3b3f9ddee..f14863c27 100755 --- a/examples/resources/airbyte_source_lokalise/resource.tf +++ b/examples/resources/airbyte_source_lokalise/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_lokalise" "my_source_lokalise" { project_id = "...my_project_id..." source_type = "lokalise" } - name = "Al Emmerich" + name = "Bernard Gottlieb" secret_id = "...my_secret_id..." - workspace_id = "afe81a88-f444-4457-bfec-d47353f63c82" + workspace_id = "573fecd4-7353-4f63-8820-9379aa69cd5f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mailchimp/resource.tf b/examples/resources/airbyte_source_mailchimp/resource.tf index 8d1992bc8..aa2a83969 100755 --- a/examples/resources/airbyte_source_mailchimp/resource.tf +++ b/examples/resources/airbyte_source_mailchimp/resource.tf @@ -9,7 +9,7 @@ resource "airbyte_source_mailchimp" "my_source_mailchimp" { } source_type = "mailchimp" } - name = "Faye Dicki" + name = "Benny Williamson" secret_id = "...my_secret_id..." - workspace_id = "aa69cd5f-bcf7-49da-98a7-822bf95894e6" + workspace_id = "da18a782-2bf9-4589-8e68-61adb55f9e5d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mailgun/resource.tf b/examples/resources/airbyte_source_mailgun/resource.tf index 535216280..351a2840a 100755 --- a/examples/resources/airbyte_source_mailgun/resource.tf +++ b/examples/resources/airbyte_source_mailgun/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_mailgun" "my_source_mailgun" { source_type = "mailgun" start_date = "2023-08-01T00:00:00Z" } - name = "Lynda Schuppe" + name = "Sheri Mayert" secret_id = "...my_secret_id..." - workspace_id = "5f9e5d75-1c9f-4e8f-b502-bfdc3450841f" + workspace_id = "8f7502bf-dc34-4508-81f1-764456379f3f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mailjet_sms/resource.tf b/examples/resources/airbyte_source_mailjet_sms/resource.tf index 076fbe85f..0b4b039c5 100755 --- a/examples/resources/airbyte_source_mailjet_sms/resource.tf +++ b/examples/resources/airbyte_source_mailjet_sms/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_mailjet_sms" "my_source_mailjetsms" { start_date = 1666261656 token = "...my_token..." } - name = "Eleanor Gibson" + name = "Dr. Eloise Cronin" secret_id = "...my_secret_id..." - workspace_id = "379f3fb2-7e21-4f86-a657-b36fc6b9f587" + workspace_id = "62657b36-fc6b-49f5-87ce-525c67641a83" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_marketo/resource.tf b/examples/resources/airbyte_source_marketo/resource.tf index 2470ef347..52094c4a2 100755 --- a/examples/resources/airbyte_source_marketo/resource.tf +++ b/examples/resources/airbyte_source_marketo/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_marketo" "my_source_marketo" { source_type = "marketo" start_date = "2020-09-25T00:00:00Z" } - name = "Sara Hegmann" + name = "Jerome Berge" secret_id = "...my_secret_id..." 
- workspace_id = "7641a831-2e50-447b-8c21-ccb423abcdc9" + workspace_id = "b4c21ccb-423a-4bcd-891f-aabdd88e71f6" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_metabase/resource.tf b/examples/resources/airbyte_source_metabase/resource.tf index 94b7c2d5b..8e677b990 100755 --- a/examples/resources/airbyte_source_metabase/resource.tf +++ b/examples/resources/airbyte_source_metabase/resource.tf @@ -4,9 +4,9 @@ resource "airbyte_source_metabase" "my_source_metabase" { password = "...my_password..." session_token = "...my_session_token..." source_type = "metabase" - username = "Audrey_Weimann71" + username = "Peyton.Green" } - name = "Jody Lebsack" + name = "Tammy Sporer" secret_id = "...my_secret_id..." - workspace_id = "71f6c482-52d7-4771-a7fd-074009ef8d29" + workspace_id = "71e7fd07-4009-4ef8-929d-e1dd7097b5da" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_microsoft_teams/resource.tf b/examples/resources/airbyte_source_microsoft_teams/resource.tf index aca96fc73..c1781a88b 100755 --- a/examples/resources/airbyte_source_microsoft_teams/resource.tf +++ b/examples/resources/airbyte_source_microsoft_teams/resource.tf @@ -11,7 +11,7 @@ resource "airbyte_source_microsoft_teams" "my_source_microsoftteams" { period = "D7" source_type = "microsoft-teams" } - name = "Noah Bernier" + name = "Brandy Ryan" secret_id = "...my_secret_id..." - workspace_id = "7097b5da-08c5-47fa-ac78-a216e19bafec" + workspace_id = "fa6c78a2-16e1-49ba-beca-6191498140b6" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mixpanel/resource.tf b/examples/resources/airbyte_source_mixpanel/resource.tf index 0acfc0101..0e0998645 100755 --- a/examples/resources/airbyte_source_mixpanel/resource.tf +++ b/examples/resources/airbyte_source_mixpanel/resource.tf @@ -1,22 +1,22 @@ resource "airbyte_source_mixpanel" "my_source_mixpanel" { configuration = { - attribution_window = 7 + attribution_window = 2 credentials = { source_mixpanel_authentication_wildcard_project_secret = { api_secret = "...my_api_secret..." option_title = "Project Secret" } } - date_window_size = 4 + date_window_size = 10 end_date = "2021-11-16" - project_id = 0 - project_timezone = "US/Pacific" - region = "EU" + project_id = 7 + project_timezone = "UTC" + region = "US" select_properties_by_default = true source_type = "mixpanel" start_date = "2021-11-16" } - name = "Karla Hoppe" + name = "Donald Ernser" secret_id = "...my_secret_id..." - workspace_id = "f8ae170e-f03b-45f3-be4a-a86855596673" + workspace_id = "f37e4aa8-6855-4596-a732-aa5dcb6682cb" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_monday/resource.tf b/examples/resources/airbyte_source_monday/resource.tf index 73273b7f0..ffbd6efed 100755 --- a/examples/resources/airbyte_source_monday/resource.tf +++ b/examples/resources/airbyte_source_monday/resource.tf @@ -8,7 +8,7 @@ resource "airbyte_source_monday" "my_source_monday" { } source_type = "monday" } - name = "Lynda Padberg" + name = "Shirley Wisoky" secret_id = "...my_secret_id..." 
- workspace_id = "cb6682cb-70f8-4cfd-9fb6-e91b9a9f7484" + workspace_id = "fd5fb6e9-1b9a-49f7-8846-e2c3309db053" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mongodb/resource.tf b/examples/resources/airbyte_source_mongodb/resource.tf index e3a100fba..6fdbef3dc 100755 --- a/examples/resources/airbyte_source_mongodb/resource.tf +++ b/examples/resources/airbyte_source_mongodb/resource.tf @@ -12,7 +12,7 @@ resource "airbyte_source_mongodb" "my_source_mongodb" { source_type = "mongodb" user = "...my_user..." } - name = "Elvira Collins" + name = "Doreen Mayer" secret_id = "...my_secret_id..." - workspace_id = "309db053-6d9e-475c-a006-f5392c11a25a" + workspace_id = "5ca006f5-392c-411a-a5a8-bf92f97428ad" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf b/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf index f8c428526..600385cb2 100755 --- a/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf +++ b/examples/resources/airbyte_source_mongodb_internal_poc/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_mongodb_internal_poc" "my_source_mongodbinternalpoc" { source_type = "mongodb-internal-poc" user = "...my_user..." } - name = "Dr. Cary McKenzie" + name = "Eduardo Weissnat" secret_id = "...my_secret_id..." - workspace_id = "7428ad9a-9f8b-4f82-a112-5359d98387f7" + workspace_id = "f8221125-359d-4983-87f7-a79cd72cd248" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mssql/resource.tf b/examples/resources/airbyte_source_mssql/resource.tf index 0fdf6d78e..215a27ace 100755 --- a/examples/resources/airbyte_source_mssql/resource.tf +++ b/examples/resources/airbyte_source_mssql/resource.tf @@ -6,11 +6,11 @@ resource "airbyte_source_mssql" "my_source_mssql" { password = "...my_password..." port = 1433 replication_method = { - source_mssql_replication_method_logical_replication_cdc_ = { - data_to_sync = "Existing and New" - initial_waiting_seconds = 6 + source_mssql_update_method_read_changes_using_change_data_capture_cdc_ = { + data_to_sync = "New Changes Only" + initial_waiting_seconds = 7 method = "CDC" - snapshot_isolation = "Read Committed" + snapshot_isolation = "Snapshot" } } schemas = [ @@ -27,9 +27,9 @@ resource "airbyte_source_mssql" "my_source_mssql" { tunnel_method = "NO_TUNNEL" } } - username = "Rhianna75" + username = "Bobbie60" } - name = "Victor Gleason" + name = "Clarence Murazik" secret_id = "...my_secret_id..." - workspace_id = "da21729f-2ac4-41ef-9725-f1169ac1e41d" + workspace_id = "1ef5725f-1169-4ac1-a41d-8a23c23e34f2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_my_hours/resource.tf b/examples/resources/airbyte_source_my_hours/resource.tf index 84e536c83..f07121f85 100755 --- a/examples/resources/airbyte_source_my_hours/resource.tf +++ b/examples/resources/airbyte_source_my_hours/resource.tf @@ -4,9 +4,9 @@ resource "airbyte_source_my_hours" "my_source_myhours" { logs_batch_size = 30 password = "...my_password..." source_type = "my-hours" - start_date = "%Y-%m-%d" + start_date = "2016-01-01" } - name = "Allen Grant" + name = "Elsa Kerluke" secret_id = "...my_secret_id..." 
- workspace_id = "dfa4a197-f6de-4922-951f-e1712099853e" + workspace_id = "922151fe-1712-4099-853e-9f543d854439" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_mysql/resource.tf b/examples/resources/airbyte_source_mysql/resource.tf index ad3998214..06257c92b 100755 --- a/examples/resources/airbyte_source_mysql/resource.tf +++ b/examples/resources/airbyte_source_mysql/resource.tf @@ -23,9 +23,9 @@ resource "airbyte_source_mysql" "my_source_mysql" { tunnel_method = "NO_TUNNEL" } } - username = "Eusebio86" + username = "Carley25" } - name = "Herman Greenfelder" + name = "Ruth Goodwin" secret_id = "...my_secret_id..." - workspace_id = "9ee22446-0443-4bc1-9418-8c2f56e85da7" + workspace_id = "bc154188-c2f5-46e8-9da7-832eabd617c3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_netsuite/resource.tf b/examples/resources/airbyte_source_netsuite/resource.tf index 581914faa..7f15f2723 100755 --- a/examples/resources/airbyte_source_netsuite/resource.tf +++ b/examples/resources/airbyte_source_netsuite/resource.tf @@ -10,9 +10,9 @@ resource "airbyte_source_netsuite" "my_source_netsuite" { start_datetime = "2017-01-25T00:00:00Z" token_key = "...my_token_key..." token_secret = "...my_token_secret..." - window_in_days = 5 + window_in_days = 7 } - name = "Judy Towne" + name = "Miss Meredith Hand" secret_id = "...my_secret_id..." - workspace_id = "d617c3b0-d51a-444b-b01b-ad8706d46082" + workspace_id = "4bf01bad-8706-4d46-882b-fbdc41ff5d4e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_notion/resource.tf b/examples/resources/airbyte_source_notion/resource.tf index eb1aa1c93..a197d16f1 100755 --- a/examples/resources/airbyte_source_notion/resource.tf +++ b/examples/resources/airbyte_source_notion/resource.tf @@ -9,7 +9,7 @@ resource "airbyte_source_notion" "my_source_notion" { source_type = "notion" start_date = "2020-11-16T00:00:00.000Z" } - name = "Dewey Schmeler DVM" + name = "Francisco Yost" secret_id = "...my_secret_id..." - workspace_id = "f5d4e2ae-4fb5-4cb3-9d17-638f1edb7835" + workspace_id = "cb35d176-38f1-4edb-b835-9ecc5cb860f8" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_nytimes/resource.tf b/examples/resources/airbyte_source_nytimes/resource.tf index 96ee648d7..e6a87e50d 100755 --- a/examples/resources/airbyte_source_nytimes/resource.tf +++ b/examples/resources/airbyte_source_nytimes/resource.tf @@ -2,12 +2,12 @@ resource "airbyte_source_nytimes" "my_source_nytimes" { configuration = { api_key = "...my_api_key..." end_date = "1851-01" - period = "30" + period = "7" share_type = "facebook" source_type = "nytimes" start_date = "2022-08" } - name = "Cecil Bauch" + name = "Mr. Emily Macejkovic" secret_id = "...my_secret_id..." - workspace_id = "cd580ba7-3810-4e4f-a444-7297cd3b1dd3" + workspace_id = "4fe44472-97cd-43b1-9d3b-bce247b7684e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_okta/resource.tf b/examples/resources/airbyte_source_okta/resource.tf index f9baa9d2a..68b9ac0e9 100755 --- a/examples/resources/airbyte_source_okta/resource.tf +++ b/examples/resources/airbyte_source_okta/resource.tf @@ -10,7 +10,7 @@ resource "airbyte_source_okta" "my_source_okta" { source_type = "okta" start_date = "2022-07-22T00:00:00Z" } - name = "Randolph Russel" + name = "Mr. Emmett Heidenreich" secret_id = "...my_secret_id..." 
- workspace_id = "47b7684e-ff50-4126-971c-ffbd0eb74b84" + workspace_id = "6d71cffb-d0eb-474b-8421-953b44bd3c43" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_omnisend/resource.tf b/examples/resources/airbyte_source_omnisend/resource.tf index 0006273e2..cfbc86e3a 100755 --- a/examples/resources/airbyte_source_omnisend/resource.tf +++ b/examples/resources/airbyte_source_omnisend/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_omnisend" "my_source_omnisend" { api_key = "...my_api_key..." source_type = "omnisend" } - name = "Virginia Mitchell" + name = "Lynn Miller" secret_id = "...my_secret_id..." - workspace_id = "b44bd3c4-3159-4d33-a595-3c001139863a" + workspace_id = "3e5953c0-0113-4986-baa4-1e6c31cc2f1f" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_onesignal/resource.tf b/examples/resources/airbyte_source_onesignal/resource.tf index 844dca5bb..feb38ab1a 100755 --- a/examples/resources/airbyte_source_onesignal/resource.tf +++ b/examples/resources/airbyte_source_onesignal/resource.tf @@ -12,7 +12,7 @@ resource "airbyte_source_onesignal" "my_source_onesignal" { start_date = "2020-11-16T00:00:00Z" user_auth_key = "...my_user_auth_key..." } - name = "Laverne Jacobs" + name = "Joan Schaefer" secret_id = "...my_secret_id..." - workspace_id = "1cc2f1fc-b51c-49a4-9ffb-e9cbd795ee65" + workspace_id = "41ffbe9c-bd79-45ee-a5e0-76cc7abf616e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_openweather/resource.tf b/examples/resources/airbyte_source_openweather/resource.tf deleted file mode 100755 index 2a1a58701..000000000 --- a/examples/resources/airbyte_source_openweather/resource.tf +++ /dev/null @@ -1,13 +0,0 @@ -resource "airbyte_source_openweather" "my_source_openweather" { - configuration = { - appid = "...my_appid..." - lang = "zh_tw" - lat = "45.7603" - lon = "4.835659" - source_type = "openweather" - units = "metric" - } - name = "Sylvester Krajcik" - secret_id = "...my_secret_id..." - workspace_id = "f616ea5c-7164-4193-8b90-f2e09d19d2fc" -} \ No newline at end of file diff --git a/examples/resources/airbyte_source_oracle/resource.tf b/examples/resources/airbyte_source_oracle/resource.tf index 106d5bef6..457d7de4a 100755 --- a/examples/resources/airbyte_source_oracle/resource.tf +++ b/examples/resources/airbyte_source_oracle/resource.tf @@ -8,14 +8,14 @@ resource "airbyte_source_oracle" "my_source_oracle" { } encryption = { source_oracle_encryption_native_network_encryption_nne_ = { - encryption_algorithm = "AES256" + encryption_algorithm = "RC4_56" encryption_method = "client_nne" } } host = "...my_host..." jdbc_url_params = "...my_jdbc_url_params..." password = "...my_password..." - port = 10 + port = 4 schemas = [ "...", ] @@ -25,9 +25,9 @@ resource "airbyte_source_oracle" "my_source_oracle" { tunnel_method = "NO_TUNNEL" } } - username = "Lila92" + username = "Oswaldo42" } - name = "Barbara Hilll" + name = "Cheryl McKenzie" secret_id = "...my_secret_id..." - workspace_id = "4b935d23-7a72-4f90-849d-6aed4aecb753" + workspace_id = "b90f2e09-d19d-42fc-af9e-2e105944b935" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_orb/resource.tf b/examples/resources/airbyte_source_orb/resource.tf index 70981f041..5a3a772ce 100755 --- a/examples/resources/airbyte_source_orb/resource.tf +++ b/examples/resources/airbyte_source_orb/resource.tf @@ -1,7 +1,7 @@ resource "airbyte_source_orb" "my_source_orb" { configuration = { api_key = "...my_api_key..." 
- lookback_window_days = 5 + lookback_window_days = 9 numeric_event_properties_keys = [ "...", ] @@ -13,7 +13,7 @@ resource "airbyte_source_orb" "my_source_orb" { ] subscription_usage_grouping_key = "...my_subscription_usage_grouping_key..." } - name = "Josh Mante" + name = "Josephine Kilback" secret_id = "...my_secret_id..." - workspace_id = "2c9ff574-91aa-4bfa-ae76-1f0ca4d456ef" + workspace_id = "2f90849d-6aed-44ae-8b75-37cd9222c9ff" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_orbit/resource.tf b/examples/resources/airbyte_source_orbit/resource.tf index 385976528..0564ed716 100755 --- a/examples/resources/airbyte_source_orbit/resource.tf +++ b/examples/resources/airbyte_source_orbit/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_orbit" "my_source_orbit" { start_date = "...my_start_date..." workspace = "...my_workspace..." } - name = "Dr. Nancy Ferry" + name = "Jo Greenholt V" secret_id = "...my_secret_id..." - workspace_id = "899f0c20-01e2-42cd-95cc-0584a184d76d" + workspace_id = "abfa2e76-1f0c-4a4d-856e-f1031e6899f0" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_outbrain_amplify/resource.tf b/examples/resources/airbyte_source_outbrain_amplify/resource.tf index ea5b0896a..3d11879df 100755 --- a/examples/resources/airbyte_source_outbrain_amplify/resource.tf +++ b/examples/resources/airbyte_source_outbrain_amplify/resource.tf @@ -7,12 +7,12 @@ resource "airbyte_source_outbrain_amplify" "my_source_outbrainamplify" { } } end_date = "...my_end_date..." - geo_location_breakdown = "region" - report_granularity = "weekly" + geo_location_breakdown = "subregion" + report_granularity = "daily" source_type = "outbrain-amplify" start_date = "...my_start_date..." } - name = "Johanna Runolfsson Jr." + name = "Cynthia Boyer" secret_id = "...my_secret_id..." - workspace_id = "c65b037b-b8e0-4cc8-8518-7e4de04af28c" + workspace_id = "2cd55cc0-584a-4184-976d-971fc820c65b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_outreach/resource.tf b/examples/resources/airbyte_source_outreach/resource.tf index c6a379c69..379b400ed 100755 --- a/examples/resources/airbyte_source_outreach/resource.tf +++ b/examples/resources/airbyte_source_outreach/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_outreach" "my_source_outreach" { source_type = "outreach" start_date = "2020-11-16T00:00:00Z" } - name = "Rosemarie Schulist" + name = "Kim Kirlin" secret_id = "...my_secret_id..." - workspace_id = "46aa1cfd-6d82-48da-8131-91129646645c" + workspace_id = "8e0cc885-187e-44de-84af-28c5dddb46aa" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_paypal_transaction/resource.tf b/examples/resources/airbyte_source_paypal_transaction/resource.tf index a659bfb9d..9f3533fc0 100755 --- a/examples/resources/airbyte_source_paypal_transaction/resource.tf +++ b/examples/resources/airbyte_source_paypal_transaction/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_paypal_transaction" "my_source_paypaltransaction" { source_type = "paypal-transaction" start_date = "2021-06-11T23:59:59+00:00" } - name = "Mrs. Roy Moore" + name = "Ernestine Little" secret_id = "...my_secret_id..." 
- workspace_id = "f569b7af-f0ea-4221-acbe-071bc163e279" + workspace_id = "da013191-1296-4466-85c1-d81f29042f56" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_paystack/resource.tf b/examples/resources/airbyte_source_paystack/resource.tf index 8e09a9c7e..dbca94488 100755 --- a/examples/resources/airbyte_source_paystack/resource.tf +++ b/examples/resources/airbyte_source_paystack/resource.tf @@ -1,11 +1,11 @@ resource "airbyte_source_paystack" "my_source_paystack" { configuration = { - lookback_window_days = 7 + lookback_window_days = 6 secret_key = "...my_secret_key..." source_type = "paystack" start_date = "2017-01-25T00:00:00Z" } - name = "Mattie Gutkowski" + name = "Dr. Boyd Wilderman" secret_id = "...my_secret_id..." - workspace_id = "99257d04-f408-447a-b42d-84496cbdeecf" + workspace_id = "2216cbe0-71bc-4163-a279-a3b084da9925" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pendo/resource.tf b/examples/resources/airbyte_source_pendo/resource.tf index 6bc92ecd3..d4cc1314e 100755 --- a/examples/resources/airbyte_source_pendo/resource.tf +++ b/examples/resources/airbyte_source_pendo/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_pendo" "my_source_pendo" { api_key = "...my_api_key..." source_type = "pendo" } - name = "Bridget Medhurst" + name = "Estelle Bechtelar" secret_id = "...my_secret_id..." - workspace_id = "c63562eb-fdf5-45c2-94c0-60b06a128776" + workspace_id = "40847a74-2d84-4496-8bde-ecf6b99bc635" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_persistiq/resource.tf b/examples/resources/airbyte_source_persistiq/resource.tf index 067b8efef..54a89064e 100755 --- a/examples/resources/airbyte_source_persistiq/resource.tf +++ b/examples/resources/airbyte_source_persistiq/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_persistiq" "my_source_persistiq" { api_key = "...my_api_key..." source_type = "persistiq" } - name = "Silvia Toy" + name = "Nicole Vandervort" secret_id = "...my_secret_id..." - workspace_id = "d0c6d6ed-9c73-4dd6-b457-1509a8e870d3" + workspace_id = "df55c294-c060-4b06-a128-7764eef6d0c6" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pexels_api/resource.tf b/examples/resources/airbyte_source_pexels_api/resource.tf index fab262a41..18312ed96 100755 --- a/examples/resources/airbyte_source_pexels_api/resource.tf +++ b/examples/resources/airbyte_source_pexels_api/resource.tf @@ -4,11 +4,11 @@ resource "airbyte_source_pexels_api" "my_source_pexelsapi" { color = "orange" locale = "en-US" orientation = "landscape" - query = "people" + query = "oceans" size = "small" source_type = "pexels-api" } - name = "Wilbert Cummings" + name = "Arnold Dooley" secret_id = "...my_secret_id..." - workspace_id = "c7b66a1f-30c7-43df-9b67-19890f42a4bb" + workspace_id = "63457150-9a8e-4870-93c5-a1f9c242c7b6" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pinterest/resource.tf b/examples/resources/airbyte_source_pinterest/resource.tf index 96c098f37..76a69a918 100755 --- a/examples/resources/airbyte_source_pinterest/resource.tf +++ b/examples/resources/airbyte_source_pinterest/resource.tf @@ -9,10 +9,10 @@ resource "airbyte_source_pinterest" "my_source_pinterest" { source_type = "pinterest" start_date = "2022-07-28" status = [ - "PAUSED", + "ACTIVE", ] } - name = "Jordan Hegmann" + name = "Nathan Bauch" secret_id = "...my_secret_id..." 
- workspace_id = "60591d74-5e3c-4205-9c9c-3f567e0e2527" + workspace_id = "3df5b671-9890-4f42-a4bb-438d85b26059" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pipedrive/resource.tf b/examples/resources/airbyte_source_pipedrive/resource.tf index 252fb8a7f..7ca558813 100755 --- a/examples/resources/airbyte_source_pipedrive/resource.tf +++ b/examples/resources/airbyte_source_pipedrive/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_pipedrive" "my_source_pipedrive" { replication_start_date = "2017-01-25T00:00:00Z" source_type = "pipedrive" } - name = "Albert Sipes" + name = "Rhonda Hammes" secret_id = "...my_secret_id..." - workspace_id = "fcdace1f-0121-46ce-a239-e8f25cd0d19d" + workspace_id = "c2059c9c-3f56-47e0-a252-765b1d62fcda" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pocket/resource.tf b/examples/resources/airbyte_source_pocket/resource.tf index 9116675fa..48fd04c0a 100755 --- a/examples/resources/airbyte_source_pocket/resource.tf +++ b/examples/resources/airbyte_source_pocket/resource.tf @@ -2,8 +2,8 @@ resource "airbyte_source_pocket" "my_source_pocket" { configuration = { access_token = "...my_access_token..." consumer_key = "...my_consumer_key..." - content_type = "video" - detail_type = "simple" + content_type = "image" + detail_type = "complete" domain = "...my_domain..." favorite = true search = "...my_search..." @@ -13,7 +13,7 @@ resource "airbyte_source_pocket" "my_source_pocket" { state = "unread" tag = "...my_tag..." } - name = "Ada Tromp" + name = "Christina Bode" secret_id = "...my_secret_id..." - workspace_id = "266cbd95-f7aa-42b2-8113-695d1e6698fc" + workspace_id = "e2239e8f-25cd-40d1-9d95-9f439e39266c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pokeapi/resource.tf b/examples/resources/airbyte_source_pokeapi/resource.tf index 95845b122..f8a819bee 100755 --- a/examples/resources/airbyte_source_pokeapi/resource.tf +++ b/examples/resources/airbyte_source_pokeapi/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_pokeapi" "my_source_pokeapi" { pokemon_name = "snorlax" source_type = "pokeapi" } - name = "Lynn Mertz" + name = "Jeremiah Hahn" secret_id = "...my_secret_id..." - workspace_id = "17c29776-7633-4425-8038-bfb5971e9819" + workspace_id = "aa2b2411-3695-4d1e-a698-fcc4596217c2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_polygon_stock_api/resource.tf b/examples/resources/airbyte_source_polygon_stock_api/resource.tf index 6a6541e2b..5bdd339ea 100755 --- a/examples/resources/airbyte_source_polygon_stock_api/resource.tf +++ b/examples/resources/airbyte_source_polygon_stock_api/resource.tf @@ -1,17 +1,17 @@ resource "airbyte_source_polygon_stock_api" "my_source_polygonstockapi" { configuration = { - adjusted = "true" + adjusted = "false" api_key = "...my_api_key..." end_date = "2020-10-14" limit = 100 - multiplier = 2 - sort = "desc" + multiplier = 1 + sort = "asc" source_type = "polygon-stock-api" start_date = "2020-10-14" - stocks_ticker = "MSFT" + stocks_ticker = "IBM" timespan = "day" } - name = "Spencer Kirlin" + name = "Mary Fisher" secret_id = "...my_secret_id..." 
- workspace_id = "a39594d6-6bc2-4ae4-8063-2b9954b6fa22" + workspace_id = "fb5971e9-8190-4557-b89c-edbac7fda395" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_postgres/resource.tf b/examples/resources/airbyte_source_postgres/resource.tf index b658d2de3..2bc80bb59 100755 --- a/examples/resources/airbyte_source_postgres/resource.tf +++ b/examples/resources/airbyte_source_postgres/resource.tf @@ -6,14 +6,8 @@ resource "airbyte_source_postgres" "my_source_postgres" { password = "...my_password..." port = 5432 replication_method = { - source_postgres_replication_method_logical_replication_cdc_ = { - initial_waiting_seconds = 4 - lsn_commit_behaviour = "While reading Data" - method = "CDC" - plugin = "pgoutput" - publication = "...my_publication..." - queue_size = 4 - replication_slot = "...my_replication_slot..." + source_postgres_update_method_detect_changes_with_xmin_system_column = { + method = "Xmin" } } schemas = [ @@ -30,9 +24,9 @@ resource "airbyte_source_postgres" "my_source_postgres" { tunnel_method = "NO_TUNNEL" } } - username = "Kendrick52" + username = "Edwardo.Streich" } - name = "Vivian Dietrich" + name = "Roosevelt Cummings" secret_id = "...my_secret_id..." - workspace_id = "10006bef-4921-4ec2-853b-749366ac8ee0" + workspace_id = "480632b9-954b-46fa-a206-369828553cb1" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_posthog/resource.tf b/examples/resources/airbyte_source_posthog/resource.tf index 934d0f77b..84b31ca82 100755 --- a/examples/resources/airbyte_source_posthog/resource.tf +++ b/examples/resources/airbyte_source_posthog/resource.tf @@ -1,11 +1,12 @@ resource "airbyte_source_posthog" "my_source_posthog" { configuration = { - api_key = "...my_api_key..." - base_url = "https://posthog.example.com" - source_type = "posthog" - start_date = "2021-01-01T00:00:00Z" + api_key = "...my_api_key..." + base_url = "https://posthog.example.com" + events_time_step = 30 + source_type = "posthog" + start_date = "2021-01-01T00:00:00Z" } - name = "Darrin Bogisich" + name = "Terence Wisozk" secret_id = "...my_secret_id..." - workspace_id = "88d40d03-f3de-4ba2-97be-3e90bc40df86" + workspace_id = "21ec2053-b749-4366-ac8e-e0f2bf19588d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_postmarkapp/resource.tf b/examples/resources/airbyte_source_postmarkapp/resource.tf index 669d1de4d..2ddf60220 100755 --- a/examples/resources/airbyte_source_postmarkapp/resource.tf +++ b/examples/resources/airbyte_source_postmarkapp/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_postmarkapp" "my_source_postmarkapp" { x_postmark_account_token = "...my_x_postmark_account_token..." x_postmark_server_token = "...my_x_postmark_server_token..." } - name = "Boyd Stoltenberg" + name = "Mr. Sharon Swift" secret_id = "...my_secret_id..." - workspace_id = "405cb331-d492-4f4f-927f-b0e0bf1f8217" + workspace_id = "3deba297-be3e-490b-840d-f868fd52405c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_prestashop/resource.tf b/examples/resources/airbyte_source_prestashop/resource.tf index 4da0e6c93..d880d5bf5 100755 --- a/examples/resources/airbyte_source_prestashop/resource.tf +++ b/examples/resources/airbyte_source_prestashop/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_prestashop" "my_source_prestashop" { start_date = "2022-01-01" url = "...my_url..." } - name = "Drew Adams" + name = "Evelyn Stracke" secret_id = "...my_secret_id..." 
- workspace_id = "ca77aeb7-b702-41a5-a046-b64e99fb0e67" + workspace_id = "2f4f127f-b0e0-4bf1-b821-7978d0acca77" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_public_apis/resource.tf b/examples/resources/airbyte_source_public_apis/resource.tf deleted file mode 100755 index 0137b68d5..000000000 --- a/examples/resources/airbyte_source_public_apis/resource.tf +++ /dev/null @@ -1,8 +0,0 @@ -resource "airbyte_source_public_apis" "my_source_publicapis" { - configuration = { - source_type = "public-apis" - } - name = "James McLaughlin" - secret_id = "...my_secret_id..." - workspace_id = "dfed5540-ef53-4a34-a1b8-fe99731adc05" -} \ No newline at end of file diff --git a/examples/resources/airbyte_source_punk_api/resource.tf b/examples/resources/airbyte_source_punk_api/resource.tf index 6fe864fb9..c2c285f0b 100755 --- a/examples/resources/airbyte_source_punk_api/resource.tf +++ b/examples/resources/airbyte_source_punk_api/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_punk_api" "my_source_punkapi" { id = 22 source_type = "punk-api" } - name = "Santos Langosh" + name = "Darnell Turcotte" secret_id = "...my_secret_id..." - workspace_id = "51bd76ee-eb51-48c4-9a1f-ad35512f06d4" + workspace_id = "540ef53a-34a1-4b8f-a997-31adc05d85ae" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_pypi/resource.tf b/examples/resources/airbyte_source_pypi/resource.tf index 89fd0440e..05cdc7664 100755 --- a/examples/resources/airbyte_source_pypi/resource.tf +++ b/examples/resources/airbyte_source_pypi/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_pypi" "my_source_pypi" { source_type = "pypi" version = "1.2.0" } - name = "Bill Reichert" + name = "Antonia Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "f0f54856-8a04-424e-80a1-d6eb9434645d" + workspace_id = "0fb38742-90d3-4365-a1ec-a16ef89451bd" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_qualaroo/resource.tf b/examples/resources/airbyte_source_qualaroo/resource.tf index cfa7a5460..b83094854 100755 --- a/examples/resources/airbyte_source_qualaroo/resource.tf +++ b/examples/resources/airbyte_source_qualaroo/resource.tf @@ -8,7 +8,7 @@ resource "airbyte_source_qualaroo" "my_source_qualaroo" { ] token = "...my_token..." } - name = "Grace Ankunding" + name = "Sue Thompson" secret_id = "...my_secret_id..." - workspace_id = "fbba5cce-ff5c-4b01-be51-e528a45ac82b" + workspace_id = "b518c4da-1fad-4355-92f0-6d4e5b72f0f5" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_quickbooks/resource.tf b/examples/resources/airbyte_source_quickbooks/resource.tf index a4d152130..0a783e7b6 100755 --- a/examples/resources/airbyte_source_quickbooks/resource.tf +++ b/examples/resources/airbyte_source_quickbooks/resource.tf @@ -8,14 +8,14 @@ resource "airbyte_source_quickbooks" "my_source_quickbooks" { client_secret = "...my_client_secret..." realm_id = "...my_realm_id..." refresh_token = "...my_refresh_token..." - token_expiry_date = "2022-04-17T11:28:41.720Z" + token_expiry_date = "2022-06-15T23:02:57.447Z" } } sandbox = false source_type = "quickbooks" start_date = "2021-03-20T00:00:00Z" } - name = "Nicholas Schroeder" + name = "William Gottlieb" secret_id = "...my_secret_id..." 
- workspace_id = "a8da4127-dd59-47ff-8711-aa1bc74b86ce" + workspace_id = "e00a1d6e-b943-4464-9d03-084fbba5ccef" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_railz/resource.tf b/examples/resources/airbyte_source_railz/resource.tf index 69de1635e..bf3d3dabd 100755 --- a/examples/resources/airbyte_source_railz/resource.tf +++ b/examples/resources/airbyte_source_railz/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_railz" "my_source_railz" { source_type = "railz" start_date = "...my_start_date..." } - name = "Myron Kunze" + name = "Clyde Schmeler Jr." secret_id = "...my_secret_id..." - workspace_id = "77b4848b-d6a6-4f04-81d2-c3b808094373" + workspace_id = "fe51e528-a45a-4c82-b85f-8bc2caba8da4" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_recharge/resource.tf b/examples/resources/airbyte_source_recharge/resource.tf index daca9e296..fceba1a03 100755 --- a/examples/resources/airbyte_source_recharge/resource.tf +++ b/examples/resources/airbyte_source_recharge/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_recharge" "my_source_recharge" { source_type = "recharge" start_date = "2021-05-14T00:00:00Z" } - name = "Karen Green" + name = "Angel Stokes" secret_id = "...my_secret_id..." - workspace_id = "bebbad02-f258-46bc-b152-558daa95be6c" + workspace_id = "7ff4711a-a1bc-474b-86ce-cc74f77b4848" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_recreation/resource.tf b/examples/resources/airbyte_source_recreation/resource.tf index 7c4706fc0..90e04ebbc 100755 --- a/examples/resources/airbyte_source_recreation/resource.tf +++ b/examples/resources/airbyte_source_recreation/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_recreation" "my_source_recreation" { query_campsites = "...my_query_campsites..." source_type = "recreation" } - name = "David Deckow" + name = "Taylor Kertzmann" secret_id = "...my_secret_id..." - workspace_id = "6c354aa4-32b4-47e1-b63c-5208c23e9802" + workspace_id = "f0441d2c-3b80-4809-8373-e060459bebba" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_recruitee/resource.tf b/examples/resources/airbyte_source_recruitee/resource.tf index fe364ff97..6efcdbe0e 100755 --- a/examples/resources/airbyte_source_recruitee/resource.tf +++ b/examples/resources/airbyte_source_recruitee/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_recruitee" "my_source_recruitee" { company_id = 9 source_type = "recruitee" } - name = "Dr. Sean Williamson" + name = "Mrs. Tina White" secret_id = "...my_secret_id..." - workspace_id = "5eb4a8b6-74ee-45cf-818e-dc7f787e32e0" + workspace_id = "6bcf1525-58da-4a95-be6c-d02756c354aa" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_recurly/resource.tf b/examples/resources/airbyte_source_recurly/resource.tf index 3cc5fc9d4..f3ce9d851 100755 --- a/examples/resources/airbyte_source_recurly/resource.tf +++ b/examples/resources/airbyte_source_recurly/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_recurly" "my_source_recurly" { end_time = "2021-12-01T00:00:00" source_type = "recurly" } - name = "Susie Donnelly" + name = "Josephine Dibbert" secret_id = "...my_secret_id..." 
- workspace_id = "ed0c5670-ef42-4bd3-89f1-cc503f6c39bc" + workspace_id = "7e1763c5-208c-423e-9802-d82f0d45eb4a" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_redshift/resource.tf b/examples/resources/airbyte_source_redshift/resource.tf index 1e0afbf84..8db6f068a 100755 --- a/examples/resources/airbyte_source_redshift/resource.tf +++ b/examples/resources/airbyte_source_redshift/resource.tf @@ -9,9 +9,9 @@ resource "airbyte_source_redshift" "my_source_redshift" { "...", ] source_type = "redshift" - username = "Abdullah.Oberbrunner" + username = "Nelda.Jaskolski" } - name = "Edward Wolf" + name = "Clay Hintz" secret_id = "...my_secret_id..." - workspace_id = "7f385189-ad7e-4f80-baae-03f33ca79fb9" + workspace_id = "c18edc7f-787e-432e-84b3-d3ed0c5670ef" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_retently/resource.tf b/examples/resources/airbyte_source_retently/resource.tf index a8cb09e62..611cccff5 100755 --- a/examples/resources/airbyte_source_retently/resource.tf +++ b/examples/resources/airbyte_source_retently/resource.tf @@ -10,7 +10,7 @@ resource "airbyte_source_retently" "my_source_retently" { } source_type = "retently" } - name = "Mrs. Lamar Fritsch" + name = "Kelly Pfeffer" secret_id = "...my_secret_id..." - workspace_id = "ba26fd36-8ba9-4216-bcb4-15835c736417" + workspace_id = "c9f1cc50-3f6c-439b-8d0a-6290f957f385" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_rki_covid/resource.tf b/examples/resources/airbyte_source_rki_covid/resource.tf index 656804aa6..e33e2377d 100755 --- a/examples/resources/airbyte_source_rki_covid/resource.tf +++ b/examples/resources/airbyte_source_rki_covid/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_rki_covid" "my_source_rkicovid" { source_type = "rki-covid" start_date = "...my_start_date..." } - name = "Diana Bogisich" + name = "Penny Morissette" secret_id = "...my_secret_id..." - workspace_id = "edc046bc-5163-4bbc-a492-27c42c22c553" + workspace_id = "7ef807aa-e03f-433c-a79f-b9de4032ba26" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_rss/resource.tf b/examples/resources/airbyte_source_rss/resource.tf index 4496e496b..55fce02f1 100755 --- a/examples/resources/airbyte_source_rss/resource.tf +++ b/examples/resources/airbyte_source_rss/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_rss" "my_source_rss" { source_type = "rss" url = "...my_url..." } - name = "Donna Gottlieb" + name = "Gustavo Donnelly" secret_id = "...my_secret_id..." - workspace_id = "c5dbb3c5-7c1e-4498-9e8a-a257ddc1912e" + workspace_id = "ba9216bc-b415-4835-8736-41723133edc0" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_s3/resource.tf b/examples/resources/airbyte_source_s3/resource.tf index 62a32e78a..c1afe122c 100755 --- a/examples/resources/airbyte_source_s3/resource.tf +++ b/examples/resources/airbyte_source_s3/resource.tf @@ -1,12 +1,16 @@ resource "airbyte_source_s3" "my_source_s3" { configuration = { - dataset = "...my_dataset..." + aws_access_key_id = "...my_aws_access_key_id..." + aws_secret_access_key = "...my_aws_secret_access_key..." + bucket = "...my_bucket..." + dataset = "...my_dataset..." + endpoint = "...my_endpoint..." format = { source_s3_file_format_avro = { filetype = "avro" } } - path_pattern = "myFolder/myTableFiles/*.csv|myFolder/myOtherTableFiles/*.csv" + path_pattern = "**" provider = { aws_access_key_id = "...my_aws_access_key_id..." aws_secret_access_key = "...my_aws_secret_access_key..." 
@@ -17,8 +21,30 @@ resource "airbyte_source_s3" "my_source_s3" { } schema = "{\"column_1\": \"number\", \"column_2\": \"string\", \"column_3\": \"array\", \"column_4\": \"object\", \"column_5\": \"boolean\"}" source_type = "s3" + start_date = "2021-01-01T00:00:00.000000Z" + streams = [ + { + days_to_sync_if_history_is_full = 1 + file_type = "...my_file_type..." + format = { + source_s3_file_based_stream_config_format_avro_format = { + double_as_string = true + filetype = "avro" + } + } + globs = [ + "...", + ] + input_schema = "...my_input_schema..." + legacy_prefix = "...my_legacy_prefix..." + name = "Flora Rempel" + primary_key = "...my_primary_key..." + schemaless = false + validation_policy = "Skip Record" + }, + ] } - name = "Ellen Reilly" + name = "Jacqueline Kiehn" secret_id = "...my_secret_id..." - workspace_id = "c5469d40-15df-4a79-a206-bef2b0a3e42c" + workspace_id = "2c22c553-5049-45c5-9bb3-c57c1e4981e8" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_salesforce/resource.tf b/examples/resources/airbyte_source_salesforce/resource.tf index 74eeb1229..b29e447e6 100755 --- a/examples/resources/airbyte_source_salesforce/resource.tf +++ b/examples/resources/airbyte_source_salesforce/resource.tf @@ -4,18 +4,18 @@ resource "airbyte_source_salesforce" "my_source_salesforce" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." force_use_bulk_api = true - is_sandbox = true + is_sandbox = false refresh_token = "...my_refresh_token..." source_type = "salesforce" - start_date = "2021-07-25T00:00:00Z" + start_date = "2021-07-25" streams_criteria = [ { - criteria = "starts with" + criteria = "not contains" value = "...my_value..." }, ] } - name = "Andy Paucek" + name = "Gregg Boyer Sr." secret_id = "...my_secret_id..." - workspace_id = "2e913558-6d18-4f9f-97a4-bfad2bf7d67c" + workspace_id = "ebde64bf-cc54-469d-8015-dfa796206bef" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_salesloft/resource.tf b/examples/resources/airbyte_source_salesloft/resource.tf index 54fd32fdc..6d96366c6 100755 --- a/examples/resources/airbyte_source_salesloft/resource.tf +++ b/examples/resources/airbyte_source_salesloft/resource.tf @@ -9,7 +9,7 @@ resource "airbyte_source_salesloft" "my_source_salesloft" { source_type = "salesloft" start_date = "2020-11-16T00:00:00Z" } - name = "Amelia Simonis" + name = "Lynda Dicki" secret_id = "...my_secret_id..." - workspace_id = "b41d6124-3531-4870-8f68-b03ad421bd43" + workspace_id = "2c1aa010-e9aa-4c2e-9135-586d18f9f97a" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sap_fieldglass/resource.tf b/examples/resources/airbyte_source_sap_fieldglass/resource.tf index 83b012e30..3b2ad185a 100755 --- a/examples/resources/airbyte_source_sap_fieldglass/resource.tf +++ b/examples/resources/airbyte_source_sap_fieldglass/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_sap_fieldglass" "my_source_sapfieldglass" { api_key = "...my_api_key..." source_type = "sap-fieldglass" } - name = "Miss Jack Wintheiser" + name = "Juana Williamson" secret_id = "...my_secret_id..." 
- workspace_id = "0a0003eb-22d9-4b3a-b0d9-4faa741c57d1" + workspace_id = "2bf7d67c-a84a-4d99-b41d-61243531870c" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_secoda/resource.tf b/examples/resources/airbyte_source_secoda/resource.tf index ed5a72a26..f66c05253 100755 --- a/examples/resources/airbyte_source_secoda/resource.tf +++ b/examples/resources/airbyte_source_secoda/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_secoda" "my_source_secoda" { api_key = "...my_api_key..." source_type = "secoda" } - name = "Ignacio Sporer" + name = "Brett Leannon I" secret_id = "...my_secret_id..." - workspace_id = "050d38dc-3ce1-4854-b2f9-ee69166a8be3" + workspace_id = "ad421bd4-3d1f-40cb-8a00-03eb22d9b3a7" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sendgrid/resource.tf b/examples/resources/airbyte_source_sendgrid/resource.tf index aa44857da..3a300d8e5 100755 --- a/examples/resources/airbyte_source_sendgrid/resource.tf +++ b/examples/resources/airbyte_source_sendgrid/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_sendgrid" "my_source_sendgrid" { source_type = "sendgrid" start_time = "2020-01-01T01:01:01Z" } - name = "Dana Sauer" + name = "Shari Pfannerstill" secret_id = "...my_secret_id..." - workspace_id = "3a2875c6-c1fe-4606-907d-2a9c87ae50c1" + workspace_id = "41c57d1f-edc2-4050-938d-c3ce185472f9" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sendinblue/resource.tf b/examples/resources/airbyte_source_sendinblue/resource.tf index 82a60cd4f..13be2f4c8 100755 --- a/examples/resources/airbyte_source_sendinblue/resource.tf +++ b/examples/resources/airbyte_source_sendinblue/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_sendinblue" "my_source_sendinblue" { api_key = "...my_api_key..." source_type = "sendinblue" } - name = "Miss Loretta Howell PhD" + name = "Terence Kassulke III" secret_id = "...my_secret_id..." - workspace_id = "9136a7e8-d532-413f-bf65-8752db764c59" + workspace_id = "6a8be344-4eac-48b3-a287-5c6c1fe606d0" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_senseforce/resource.tf b/examples/resources/airbyte_source_senseforce/resource.tf index e9a5992fc..11adf0994 100755 --- a/examples/resources/airbyte_source_senseforce/resource.tf +++ b/examples/resources/airbyte_source_senseforce/resource.tf @@ -3,11 +3,11 @@ resource "airbyte_source_senseforce" "my_source_senseforce" { access_token = "...my_access_token..." backend_url = "https://galaxyapi.senseforce.io" dataset_id = "8f418098-ca28-4df5-9498-0df9fe78eda7" - slice_range = 360 + slice_range = 10 source_type = "senseforce" start_date = "2017-01-25" } - name = "Nichole Treutel" + name = "Rodolfo Langworth" secret_id = "...my_secret_id..." - workspace_id = "ada29ca7-9181-4c95-a716-63c530b56651" + workspace_id = "e50c1666-1a1d-4913-aa7e-8d53213f3f65" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sentry/resource.tf b/examples/resources/airbyte_source_sentry/resource.tf index 1c578879a..9620c2f19 100755 --- a/examples/resources/airbyte_source_sentry/resource.tf +++ b/examples/resources/airbyte_source_sentry/resource.tf @@ -4,12 +4,12 @@ resource "airbyte_source_sentry" "my_source_sentry" { discover_fields = [ "{ \"see\": \"documentation\" }", ] - hostname = "imperfect-creator.name" + hostname = "muted-ingredient.biz" organization = "...my_organization..." project = "...my_project..." 
source_type = "sentry" } - name = "Laurie Farrell" + name = "Krystal Quitzon" secret_id = "...my_secret_id..." - workspace_id = "12ab2521-b9f2-4e07-a467-b8a40bc05fab" + workspace_id = "4c59f0a5-6ceb-4cad-a29c-a79181c95671" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sftp/resource.tf b/examples/resources/airbyte_source_sftp/resource.tf index 85ad095cb..c0b68b795 100755 --- a/examples/resources/airbyte_source_sftp/resource.tf +++ b/examples/resources/airbyte_source_sftp/resource.tf @@ -9,12 +9,12 @@ resource "airbyte_source_sftp" "my_source_sftp" { file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" file_types = "csv,json" folder_path = "/logs/2022" - host = "192.0.2.1" + host = "www.host.com" port = 22 source_type = "sftp" user = "...my_user..." } - name = "Dorothy Waters" + name = "Miss Tommy Emard" secret_id = "...my_secret_id..." - workspace_id = "22a94d20-ec90-4ea4-9d1f-465e85156fff" + workspace_id = "665163a3-6385-412a-b252-1b9f2e072467" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sftp_bulk/resource.tf b/examples/resources/airbyte_source_sftp_bulk/resource.tf index b74522fe1..64ae1edc4 100755 --- a/examples/resources/airbyte_source_sftp_bulk/resource.tf +++ b/examples/resources/airbyte_source_sftp_bulk/resource.tf @@ -1,8 +1,8 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" { configuration = { - file_most_recent = true + file_most_recent = false file_pattern = "log-([0-9]{4})([0-9]{2})([0-9]{2}) - This will filter files which `log-yearmmdd`" - file_type = "csv" + file_type = "json" folder_path = "/logs/2022" host = "192.0.2.1" password = "...my_password..." @@ -12,9 +12,9 @@ resource "airbyte_source_sftp_bulk" "my_source_sftpbulk" { source_type = "sftp-bulk" start_date = "2017-01-25T00:00:00Z" stream_name = "ftp_contacts" - username = "Donny_Wuckert34" + username = "Pearline_Bailey" } - name = "Doug Marvin" + name = "Wm Bartoletti" secret_id = "...my_secret_id..." - workspace_id = "3398dafb-42a8-4d63-b88e-4d8039ea5f9b" + workspace_id = "50edf22a-94d2-40ec-90ea-41d1f465e851" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_shopify/resource.tf b/examples/resources/airbyte_source_shopify/resource.tf index e739c5f59..d986b5622 100755 --- a/examples/resources/airbyte_source_shopify/resource.tf +++ b/examples/resources/airbyte_source_shopify/resource.tf @@ -8,9 +8,9 @@ resource "airbyte_source_shopify" "my_source_shopify" { } shop = "my-store" source_type = "shopify" - start_date = "2021-01-01" + start_date = "2022-01-02" } - name = "Rhonda Gislason" + name = "Randal Kris" secret_id = "...my_secret_id..." - workspace_id = "619039da-cd38-4ed0-9c67-1dc7f1e3af15" + workspace_id = "df54fdd5-ea95-4433-98da-fb42a8d63388" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_shortio/resource.tf b/examples/resources/airbyte_source_shortio/resource.tf index e303be4e1..bc9768ca7 100755 --- a/examples/resources/airbyte_source_shortio/resource.tf +++ b/examples/resources/airbyte_source_shortio/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_shortio" "my_source_shortio" { source_type = "shortio" start_date = "2023-07-30T03:43:59.244Z" } - name = "Carlos Armstrong" + name = "Troy Streich I" secret_id = "...my_secret_id..." 
- workspace_id = "0d1b4901-f2bd-489c-8a32-639da5b7b690" + workspace_id = "9ea5f9b1-8a24-44fd-a190-39dacd38ed0d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_slack/resource.tf b/examples/resources/airbyte_source_slack/resource.tf index c8e76ed08..19d0d7616 100755 --- a/examples/resources/airbyte_source_slack/resource.tf +++ b/examples/resources/airbyte_source_slack/resource.tf @@ -9,12 +9,12 @@ resource "airbyte_source_slack" "my_source_slack" { option_title = "API Token Credentials" } } - join_channels = true - lookback_window = 14 + join_channels = false + lookback_window = 7 source_type = "slack" start_date = "2017-01-25T00:00:00Z" } - name = "Eduardo Gottlieb" + name = "Dr. Jamie Wintheiser" secret_id = "...my_secret_id..." - workspace_id = "43664a8f-0af8-4c69-9d73-2d9fbaf9476a" + workspace_id = "af15920c-90d1-4b49-81f2-bd89c8a32639" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_smaily/resource.tf b/examples/resources/airbyte_source_smaily/resource.tf index 667457072..bdf367829 100755 --- a/examples/resources/airbyte_source_smaily/resource.tf +++ b/examples/resources/airbyte_source_smaily/resource.tf @@ -5,7 +5,7 @@ resource "airbyte_source_smaily" "my_source_smaily" { api_username = "...my_api_username..." source_type = "smaily" } - name = "Lynda Thiel" + name = "Donnie Hauck" secret_id = "...my_secret_id..." - workspace_id = "cc50c8a3-512c-4737-8489-30750a00e966" + workspace_id = "b6902b88-1a94-4f64-b664-a8f0af8c691d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_smartengage/resource.tf b/examples/resources/airbyte_source_smartengage/resource.tf index e56c11562..3d73a1f37 100755 --- a/examples/resources/airbyte_source_smartengage/resource.tf +++ b/examples/resources/airbyte_source_smartengage/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_smartengage" "my_source_smartengage" { api_key = "...my_api_key..." source_type = "smartengage" } - name = "Guillermo Kunze" + name = "Carmen Crist" secret_id = "...my_secret_id..." - workspace_id = "d4319439-8c78-43c9-a398-ed3d3ab7ca3c" + workspace_id = "fbaf9476-a2ae-48dc-850c-8a3512c73784" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_smartsheets/resource.tf b/examples/resources/airbyte_source_smartsheets/resource.tf index 6b1e1dc69..addba7c24 100755 --- a/examples/resources/airbyte_source_smartsheets/resource.tf +++ b/examples/resources/airbyte_source_smartsheets/resource.tf @@ -7,13 +7,13 @@ resource "airbyte_source_smartsheets" "my_source_smartsheets" { } } metadata_fields = [ - "sheetversion", + "row_access_level", ] source_type = "smartsheets" spreadsheet_id = "...my_spreadsheet_id..." start_datetime = "2000-01-01T13:00:00-07:00" } - name = "Michele Muller" + name = "Joann Bechtelar Jr." secret_id = "...my_secret_id..." - workspace_id = "0cfd5d69-89b7-4206-8510-77d19ea83d49" + workspace_id = "e966ec73-6d43-4194-b98c-783c92398ed3" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_snapchat_marketing/resource.tf b/examples/resources/airbyte_source_snapchat_marketing/resource.tf index 10aaf3162..ed5f2884d 100755 --- a/examples/resources/airbyte_source_snapchat_marketing/resource.tf +++ b/examples/resources/airbyte_source_snapchat_marketing/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_snapchat_marketing" "my_source_snapchatmarketing" { source_type = "snapchat-marketing" start_date = "2022-01-01" } - name = "Hannah MacGyver" + name = "Chelsea Ortiz" secret_id = "...my_secret_id..." 
- workspace_id = "c1954545-e955-4dcc-985e-a4901c7c43ad" + workspace_id = "5ca8649a-70cf-4d5d-a989-b7206451077d" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_snowflake/resource.tf b/examples/resources/airbyte_source_snowflake/resource.tf index 2f0afc0b2..15f3db7ca 100755 --- a/examples/resources/airbyte_source_snowflake/resource.tf +++ b/examples/resources/airbyte_source_snowflake/resource.tf @@ -17,7 +17,7 @@ resource "airbyte_source_snowflake" "my_source_snowflake" { source_type = "snowflake" warehouse = "AIRBYTE_WAREHOUSE" } - name = "Betsy Osinski" + name = "Katrina Tillman" secret_id = "...my_secret_id..." - workspace_id = "84aba3d2-30ed-4f73-811a-115382bd7ed5" + workspace_id = "3d492ed1-4b8a-42c1-9545-45e955dcc185" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_sonar_cloud/resource.tf b/examples/resources/airbyte_source_sonar_cloud/resource.tf index 76bd4f8e4..0de15f144 100755 --- a/examples/resources/airbyte_source_sonar_cloud/resource.tf +++ b/examples/resources/airbyte_source_sonar_cloud/resource.tf @@ -9,7 +9,7 @@ resource "airbyte_source_sonar_cloud" "my_source_sonarcloud" { start_date = "YYYY-MM-DD" user_token = "...my_user_token..." } - name = "Judy Bogan" + name = "Mildred Rosenbaum" secret_id = "...my_secret_id..." - workspace_id = "8f4d7396-564c-420a-8711-a961d24a7dbb" + workspace_id = "43ad2daa-784a-4ba3-9230-edf73811a115" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_spacex_api/resource.tf b/examples/resources/airbyte_source_spacex_api/resource.tf index 946f1bdb3..8824f13ee 100755 --- a/examples/resources/airbyte_source_spacex_api/resource.tf +++ b/examples/resources/airbyte_source_spacex_api/resource.tf @@ -1,10 +1,10 @@ resource "airbyte_source_spacex_api" "my_source_spacexapi" { configuration = { - id = "8f532d89-2cf7-4812-8b51-2c878240bf54" + id = "382bd7ed-5650-4762-9c58-f4d7396564c2" options = "...my_options..." source_type = "spacex-api" } - name = "Randal Lockman" + name = "Lee Batz Jr." secret_id = "...my_secret_id..." - workspace_id = "8f1bf0bc-8e1f-4206-95d8-31d0081090f6" + workspace_id = "a961d24a-7dbb-48f5-b2d8-92cf7812cb51" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_square/resource.tf b/examples/resources/airbyte_source_square/resource.tf index 26e625d78..61c86f490 100755 --- a/examples/resources/airbyte_source_square/resource.tf +++ b/examples/resources/airbyte_source_square/resource.tf @@ -6,12 +6,12 @@ resource "airbyte_source_square" "my_source_square" { auth_type = "API Key" } } - include_deleted_objects = false + include_deleted_objects = true is_sandbox = false source_type = "square" - start_date = "2022-08-13" + start_date = "2022-02-01" } - name = "Josephine Yundt" + name = "Miss Bruce Gibson" secret_id = "...my_secret_id..." - workspace_id = "681c5768-dce7-4424-89a2-15e08601489a" + workspace_id = "548f88f8-f1bf-40bc-8e1f-206d5d831d00" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_strava/resource.tf b/examples/resources/airbyte_source_strava/resource.tf index 13728b278..9f3ccc201 100755 --- a/examples/resources/airbyte_source_strava/resource.tf +++ b/examples/resources/airbyte_source_strava/resource.tf @@ -8,7 +8,7 @@ resource "airbyte_source_strava" "my_source_strava" { source_type = "strava" start_date = "2021-03-01T00:00:00Z" } - name = "Henrietta Durgan" + name = "Jeffrey Wintheiser" secret_id = "...my_secret_id..." 
- workspace_id = "3dd9dda3-3dcd-4634-83e4-a7a98e4df37e" + workspace_id = "06673f3a-681c-4576-8dce-742409a215e0" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_stripe/resource.tf b/examples/resources/airbyte_source_stripe/resource.tf index 64a5549bd..2d22ec1b8 100755 --- a/examples/resources/airbyte_source_stripe/resource.tf +++ b/examples/resources/airbyte_source_stripe/resource.tf @@ -2,12 +2,12 @@ resource "airbyte_source_stripe" "my_source_stripe" { configuration = { account_id = "...my_account_id..." client_secret = "...my_client_secret..." - lookback_window_days = 3 + lookback_window_days = 5 slice_range = 10 source_type = "stripe" start_date = "2017-01-25T00:00:00Z" } - name = "Bernice Schultz I" + name = "Seth Nitzsche" secret_id = "...my_secret_id..." - workspace_id = "e13a4823-1090-47bd-b54c-092bd734f024" + workspace_id = "63e3af3d-d9dd-4a33-9cd6-3483e4a7a98e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_survey_sparrow/resource.tf b/examples/resources/airbyte_source_survey_sparrow/resource.tf index 83ee375f9..7c5f90d2e 100755 --- a/examples/resources/airbyte_source_survey_sparrow/resource.tf +++ b/examples/resources/airbyte_source_survey_sparrow/resource.tf @@ -11,7 +11,7 @@ resource "airbyte_source_survey_sparrow" "my_source_surveysparrow" { "{ \"see\": \"documentation\" }", ] } - name = "Merle Keebler Jr." + name = "Hugo Kovacek" secret_id = "...my_secret_id..." - workspace_id = "e9db3ad4-c6b0-4310-8d9c-337473082b94" + workspace_id = "f02449d8-6f4b-4b20-be5d-911cbfe749ca" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_surveymonkey/resource.tf b/examples/resources/airbyte_source_surveymonkey/resource.tf index 1f8978459..a6b11652b 100755 --- a/examples/resources/airbyte_source_surveymonkey/resource.tf +++ b/examples/resources/airbyte_source_surveymonkey/resource.tf @@ -13,7 +13,7 @@ resource "airbyte_source_surveymonkey" "my_source_surveymonkey" { "...", ] } - name = "Cecil Wintheiser" + name = "Pearl Trantow" secret_id = "...my_secret_id..." - workspace_id = "b20fe5d9-11cb-4fe7-89ca-f45a27f69e2c" + workspace_id = "b8955d41-3e13-4a48-a310-907bd354c092" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_tempo/resource.tf b/examples/resources/airbyte_source_tempo/resource.tf index 347880135..03e5b4828 100755 --- a/examples/resources/airbyte_source_tempo/resource.tf +++ b/examples/resources/airbyte_source_tempo/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_tempo" "my_source_tempo" { api_token = "...my_api_token..." source_type = "tempo" } - name = "Alan O'Conner DVM" + name = "Edwin Haley" secret_id = "...my_secret_id..." - workspace_id = "d5671e9c-3263-450a-8671-43789ce0e991" + workspace_id = "7f69e2c9-e6d1-40e9-9b3a-d4c6b03108d9" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_the_guardian_api/resource.tf b/examples/resources/airbyte_source_the_guardian_api/resource.tf index 486db48a2..678d3a191 100755 --- a/examples/resources/airbyte_source_the_guardian_api/resource.tf +++ b/examples/resources/airbyte_source_the_guardian_api/resource.tf @@ -2,13 +2,13 @@ resource "airbyte_source_the_guardian_api" "my_source_theguardianapi" { configuration = { api_key = "...my_api_key..." 
end_date = "YYYY-MM-DD" - query = "environment AND political" - section = "technology" + query = "political" + section = "media" source_type = "the-guardian-api" start_date = "YYYY-MM-DD" tag = "environment/recycling" } - name = "Arturo Fay" + name = "Pauline Kozey IV" secret_id = "...my_secret_id..." - workspace_id = "4c0252fe-3b4b-44db-8b77-8ebb6e1d2cf5" + workspace_id = "2b94f2ab-1fd5-4671-a9c3-26350a467143" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_tiktok_marketing/resource.tf b/examples/resources/airbyte_source_tiktok_marketing/resource.tf index dac6984ca..21a1160b5 100755 --- a/examples/resources/airbyte_source_tiktok_marketing/resource.tf +++ b/examples/resources/airbyte_source_tiktok_marketing/resource.tf @@ -1,6 +1,6 @@ resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" { configuration = { - attribution_window = 0 + attribution_window = 5 credentials = { source_tiktok_marketing_authentication_method_o_auth2_0 = { access_token = "...my_access_token..." @@ -10,12 +10,12 @@ resource "airbyte_source_tiktok_marketing" "my_source_tiktokmarketing" { secret = "...my_secret..." } } - end_date = "2022-04-16" - include_deleted = true + end_date = "2021-10-08" + include_deleted = false source_type = "tiktok-marketing" - start_date = "2020-11-25" + start_date = "2022-12-21" } - name = "Vicky Reichert" + name = "Mrs. Joey Mueller" secret_id = "...my_secret_id..." - workspace_id = "635d5e65-da02-48c3-a951-a1e30fda9664" + workspace_id = "4d93a74c-0252-4fe3-b4b4-db8b778ebb6e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_todoist/resource.tf b/examples/resources/airbyte_source_todoist/resource.tf index 425e62df8..0400ee0c2 100755 --- a/examples/resources/airbyte_source_todoist/resource.tf +++ b/examples/resources/airbyte_source_todoist/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_todoist" "my_source_todoist" { source_type = "todoist" token = "...my_token..." } - name = "Tracy Senger" + name = "Hope Collins" secret_id = "...my_secret_id..." - workspace_id = "78673e13-a12a-46b9-9249-4594487f5c84" + workspace_id = "502bafb2-cbc4-4635-95e6-5da028c3e951" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_trello/resource.tf b/examples/resources/airbyte_source_trello/resource.tf index 803db7460..ef2bb2452 100755 --- a/examples/resources/airbyte_source_trello/resource.tf +++ b/examples/resources/airbyte_source_trello/resource.tf @@ -8,7 +8,7 @@ resource "airbyte_source_trello" "my_source_trello" { start_date = "2021-03-01T00:00:00Z" token = "...my_token..." } - name = "Rosemary Rice" + name = "Philip Armstrong" secret_id = "...my_secret_id..." - workspace_id = "b3cdf641-5b04-449f-9df1-3f4eedbe78bf" + workspace_id = "a966489d-7b78-4673-a13a-12a6b9924945" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_trustpilot/resource.tf b/examples/resources/airbyte_source_trustpilot/resource.tf index 17ae82a8f..fcfc79005 100755 --- a/examples/resources/airbyte_source_trustpilot/resource.tf +++ b/examples/resources/airbyte_source_trustpilot/resource.tf @@ -12,7 +12,7 @@ resource "airbyte_source_trustpilot" "my_source_trustpilot" { source_type = "trustpilot" start_date = "%Y-%m-%dT%H:%M:%S" } - name = "Betty Jast" + name = "Bradley Goodwin" secret_id = "...my_secret_id..." 
- workspace_id = "5894ea76-3d5c-4727-95b7-85148d6d549e" + workspace_id = "f5c84383-6b86-4b3c-9f64-15b0449f9df1" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_tvmaze_schedule/resource.tf b/examples/resources/airbyte_source_tvmaze_schedule/resource.tf index 1f76070eb..ce39a3b63 100755 --- a/examples/resources/airbyte_source_tvmaze_schedule/resource.tf +++ b/examples/resources/airbyte_source_tvmaze_schedule/resource.tf @@ -4,9 +4,9 @@ resource "airbyte_source_tvmaze_schedule" "my_source_tvmazeschedule" { end_date = "...my_end_date..." source_type = "tvmaze-schedule" start_date = "...my_start_date..." - web_schedule_country_code = "GB" + web_schedule_country_code = "global" } - name = "Audrey Rippin" + name = "Gretchen Waters" secret_id = "...my_secret_id..." - workspace_id = "bc0f970c-42fc-49f4-8442-25e75b796065" + workspace_id = "e78bf606-8258-494e-a763-d5c72795b785" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_twilio/resource.tf b/examples/resources/airbyte_source_twilio/resource.tf index fd8aff4e2..8d0453266 100755 --- a/examples/resources/airbyte_source_twilio/resource.tf +++ b/examples/resources/airbyte_source_twilio/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_twilio" "my_source_twilio" { source_type = "twilio" start_date = "2020-10-01T00:00:00Z" } - name = "Oliver Kautzer" + name = "Andre Sporer" secret_id = "...my_secret_id..." - workspace_id = "3b90a1b8-c95b-4e12-94b7-39f4fe77210d" + workspace_id = "9e5635b3-3bc0-4f97-8c42-fc9f4844225e" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_twilio_taskrouter/resource.tf b/examples/resources/airbyte_source_twilio_taskrouter/resource.tf index bd7e35c14..3586436da 100755 --- a/examples/resources/airbyte_source_twilio_taskrouter/resource.tf +++ b/examples/resources/airbyte_source_twilio_taskrouter/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_twilio_taskrouter" "my_source_twiliotaskrouter" { auth_token = "...my_auth_token..." source_type = "twilio-taskrouter" } - name = "Marta Hodkiewicz" + name = "Cathy Ratke" secret_id = "...my_secret_id..." - workspace_id = "8c99c722-d2bc-40f9-8087-d9caae042dd7" + workspace_id = "6065c0ef-a6f9-43b9-8a1b-8c95be1254b7" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_twitter/resource.tf b/examples/resources/airbyte_source_twitter/resource.tf index 8719133d6..32d6dcd9b 100755 --- a/examples/resources/airbyte_source_twitter/resource.tf +++ b/examples/resources/airbyte_source_twitter/resource.tf @@ -1,12 +1,12 @@ resource "airbyte_source_twitter" "my_source_twitter" { configuration = { api_key = "...my_api_key..." - end_date = "2020-12-31T16:49:13.658Z" + end_date = "2022-05-29T22:05:47.839Z" query = "...my_query..." source_type = "twitter" - start_date = "2021-05-21T04:49:52.479Z" + start_date = "2022-02-11T15:55:53.597Z" } - name = "Marco Gleichner" + name = "Elbert Kuhic" secret_id = "...my_secret_id..." - workspace_id = "a1cfe9e1-5df9-4039-87f3-7831983d42e5" + workspace_id = "10d1f655-8c99-4c72-ad2b-c0f94087d9ca" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_typeform/resource.tf b/examples/resources/airbyte_source_typeform/resource.tf index 2e2f41c0e..82cc96196 100755 --- a/examples/resources/airbyte_source_typeform/resource.tf +++ b/examples/resources/airbyte_source_typeform/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_typeform" "my_source_typeform" { client_id = "...my_client_id..." client_secret = "...my_client_secret..." 
refresh_token = "...my_refresh_token..." - token_expiry_date = "2022-05-15T15:21:36.066Z" + token_expiry_date = "2021-02-23T09:05:08.511Z" } } form_ids = [ @@ -16,7 +16,7 @@ resource "airbyte_source_typeform" "my_source_typeform" { source_type = "typeform" start_date = "2021-03-01T00:00:00Z" } - name = "Lucy Johnson" + name = "Rosemarie Spencer" secret_id = "...my_secret_id..." - workspace_id = "7c50233c-1471-4d51-aaa6-ddf5abd6487c" + workspace_id = "aac9b4ca-a1cf-4e9e-95df-903907f37831" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_us_census/resource.tf b/examples/resources/airbyte_source_us_census/resource.tf index e8b2a9a28..3761f849f 100755 --- a/examples/resources/airbyte_source_us_census/resource.tf +++ b/examples/resources/airbyte_source_us_census/resource.tf @@ -1,11 +1,11 @@ resource "airbyte_source_us_census" "my_source_uscensus" { configuration = { api_key = "...my_api_key..." - query_params = "get=NAME,NAICS2017_LABEL,LFO_LABEL,EMPSZES_LABEL,ESTAB,PAYANN,PAYQTR1,EMP&for=us:*&NAICS2017=72&LFO=001&EMPSZES=001" - query_path = "data/timeseries/healthins/sahie" + query_params = "get=MOVEDIN,GEOID1,GEOID2,MOVEDOUT,FULL1_NAME,FULL2_NAME,MOVEDNET&for=county:*" + query_path = "data/2018/acs" source_type = "us-census" } - name = "Brandon Rogahn" + name = "Ginger Gislason" secret_id = "...my_secret_id..." - workspace_id = "2a00bef6-9e10-4015-b630-bda7afded84a" + workspace_id = "54a85466-597c-4502-b3c1-471d51aaa6dd" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_vantage/resource.tf b/examples/resources/airbyte_source_vantage/resource.tf index 506ad1338..23c9ccef6 100755 --- a/examples/resources/airbyte_source_vantage/resource.tf +++ b/examples/resources/airbyte_source_vantage/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_vantage" "my_source_vantage" { access_token = "...my_access_token..." source_type = "vantage" } - name = "Veronica Pagac Sr." + name = "Corey Pacocha" secret_id = "...my_secret_id..." - workspace_id = "38e1a735-ac26-4ae3-bbef-971a8f46bca1" + workspace_id = "6487c5fc-2b86-42a0-8bef-69e100157630" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_webflow/resource.tf b/examples/resources/airbyte_source_webflow/resource.tf index 139e5f57a..c667b4b1d 100755 --- a/examples/resources/airbyte_source_webflow/resource.tf +++ b/examples/resources/airbyte_source_webflow/resource.tf @@ -4,7 +4,7 @@ resource "airbyte_source_webflow" "my_source_webflow" { site_id = "a relatively long hex sequence" source_type = "webflow" } - name = "Jennifer Johnson" + name = "Taylor Paucek" secret_id = "...my_secret_id..." - workspace_id = "965b711d-08cf-488e-89f7-b99a550a656e" + workspace_id = "fded84a3-5a41-4238-a1a7-35ac26ae33be" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_whisky_hunter/resource.tf b/examples/resources/airbyte_source_whisky_hunter/resource.tf index d68fcb199..296450e92 100755 --- a/examples/resources/airbyte_source_whisky_hunter/resource.tf +++ b/examples/resources/airbyte_source_whisky_hunter/resource.tf @@ -2,7 +2,7 @@ resource "airbyte_source_whisky_hunter" "my_source_whiskyhunter" { configuration = { source_type = "whisky-hunter" } - name = "Curtis Dickens" + name = "Miss Terrence Kulas" secret_id = "...my_secret_id..." 
- workspace_id = "b0ce8aa6-5432-4a98-aeb7-e14ca5640891" + workspace_id = "f46bca11-06fe-4965-b711-d08cf88ec9f7" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf b/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf index 2339b8e38..8646b4062 100755 --- a/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf +++ b/examples/resources/airbyte_source_wikipedia_pageviews/resource.tf @@ -1,15 +1,15 @@ resource "airbyte_source_wikipedia_pageviews" "my_source_wikipediapageviews" { configuration = { - access = "desktop" - agent = "all-agents" + access = "mobile-app" + agent = "spider" article = "Are_You_the_One%3F" - country = "FR" + country = "IN" end = "...my_end..." project = "www.mediawiki.org" source_type = "wikipedia-pageviews" start = "...my_start..." } - name = "Catherine Mitchell" + name = "Laura Murray" secret_id = "...my_secret_id..." - workspace_id = "8f88ece7-bf90-44e0-9105-d38908162c6b" + workspace_id = "6ed333bb-0ce8-4aa6-9432-a986eb7e14ca" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_woocommerce/resource.tf b/examples/resources/airbyte_source_woocommerce/resource.tf index d0da6a498..3dc16611e 100755 --- a/examples/resources/airbyte_source_woocommerce/resource.tf +++ b/examples/resources/airbyte_source_woocommerce/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_woocommerce" "my_source_woocommerce" { source_type = "woocommerce" start_date = "2021-01-01" } - name = "Dr. Mattie Nader" + name = "Laura Lindgren III" secret_id = "...my_secret_id..." - workspace_id = "57b7d03a-1480-4f8d-a30f-069d810618d9" + workspace_id = "0097019a-48f8-48ec-a7bf-904e01105d38" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_xero/resource.tf b/examples/resources/airbyte_source_xero/resource.tf index 733835fe6..d6c017113 100755 --- a/examples/resources/airbyte_source_xero/resource.tf +++ b/examples/resources/airbyte_source_xero/resource.tf @@ -11,7 +11,7 @@ resource "airbyte_source_xero" "my_source_xero" { start_date = "2022-03-01T00:00:00Z" tenant_id = "...my_tenant_id..." } - name = "Geraldine Crist" + name = "Roger Hudson" secret_id = "...my_secret_id..." - workspace_id = "7510da80-3122-492c-861c-2a702bb97ee1" + workspace_id = "6beb68a0-f657-4b7d-83a1-480f8de30f06" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_xkcd/resource.tf b/examples/resources/airbyte_source_xkcd/resource.tf index 6df5355c6..4c5088013 100755 --- a/examples/resources/airbyte_source_xkcd/resource.tf +++ b/examples/resources/airbyte_source_xkcd/resource.tf @@ -2,7 +2,7 @@ resource "airbyte_source_xkcd" "my_source_xkcd" { configuration = { source_type = "xkcd" } - name = "Bonnie Steuber" + name = "Mr. Laurence Littel" secret_id = "...my_secret_id..." - workspace_id = "de35f8e0-1bf3-43ea-ab45-402ac1704bf1" + workspace_id = "18d97e15-2297-4510-9a80-312292cc61c2" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_yandex_metrica/resource.tf b/examples/resources/airbyte_source_yandex_metrica/resource.tf index 221c9dbc1..63c10201a 100755 --- a/examples/resources/airbyte_source_yandex_metrica/resource.tf +++ b/examples/resources/airbyte_source_yandex_metrica/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_yandex_metrica" "my_source_yandexmetrica" { source_type = "yandex-metrica" start_date = "2022-01-01" } - name = "Reginald Carter" + name = "Dominic Marvin" secret_id = "...my_secret_id..." 
- workspace_id = "e5eb5f0c-492b-4574-8d08-a2267aaee79e" + workspace_id = "e102da2d-e35f-48e0-9bf3-3eaab45402ac" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_yotpo/resource.tf b/examples/resources/airbyte_source_yotpo/resource.tf index 41e492655..8f2d3e22c 100755 --- a/examples/resources/airbyte_source_yotpo/resource.tf +++ b/examples/resources/airbyte_source_yotpo/resource.tf @@ -2,11 +2,11 @@ resource "airbyte_source_yotpo" "my_source_yotpo" { configuration = { access_token = "...my_access_token..." app_key = "...my_app_key..." - email = "Petra66@gmail.com" + email = "Ibrahim74@gmail.com" source_type = "yotpo" start_date = "2022-03-01T00:00:00.000Z" } - name = "Hannah Volkman" + name = "Clark McGlynn" secret_id = "...my_secret_id..." - workspace_id = "83d2378a-e3bf-4c23-9945-0a986a495bac" + workspace_id = "61aae5eb-5f0c-4492-b574-4d08a2267aae" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_younium/resource.tf b/examples/resources/airbyte_source_younium/resource.tf index fd9045cc6..3ebeaeb4e 100755 --- a/examples/resources/airbyte_source_younium/resource.tf +++ b/examples/resources/airbyte_source_younium/resource.tf @@ -2,11 +2,11 @@ resource "airbyte_source_younium" "my_source_younium" { configuration = { legal_entity = "...my_legal_entity..." password = "...my_password..." - playground = false + playground = true source_type = "younium" - username = "Angeline.Kunze40" + username = "Jairo.Monahan79" } - name = "Ernest Larkin" + name = "Martha Orn" secret_id = "...my_secret_id..." - workspace_id = "c8649238-6f62-4c96-9c4c-c6b78890a3fd" + workspace_id = "1becb83d-2378-4ae3-bfc2-3d9450a986a4" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_youtube_analytics/resource.tf b/examples/resources/airbyte_source_youtube_analytics/resource.tf index 4bb03ed19..c2dc84e6b 100755 --- a/examples/resources/airbyte_source_youtube_analytics/resource.tf +++ b/examples/resources/airbyte_source_youtube_analytics/resource.tf @@ -7,7 +7,7 @@ resource "airbyte_source_youtube_analytics" "my_source_youtubeanalytics" { } source_type = "youtube-analytics" } - name = "Dr. Kara Lowe" + name = "Tommy Rippin" secret_id = "...my_secret_id..." - workspace_id = "10f8c23d-f931-4da3-adb5-1fad94acc943" + workspace_id = "707f06b2-8ecc-4864-9238-6f62c969c4cc" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zendesk_chat/resource.tf b/examples/resources/airbyte_source_zendesk_chat/resource.tf index 135abe9d0..92b8327b0 100755 --- a/examples/resources/airbyte_source_zendesk_chat/resource.tf +++ b/examples/resources/airbyte_source_zendesk_chat/resource.tf @@ -10,7 +10,7 @@ resource "airbyte_source_zendesk_chat" "my_source_zendeskchat" { start_date = "2021-02-01T00:00:00Z" subdomain = "...my_subdomain..." } - name = "Melinda Koch" + name = "Mabel Lebsack MD" secret_id = "...my_secret_id..." - workspace_id = "d15321b8-32a5-46d6-9180-ff60eb9a6658" + workspace_id = "3fd3c81d-a10f-48c2-bdf9-31da3edb51fa" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zendesk_sunshine/resource.tf b/examples/resources/airbyte_source_zendesk_sunshine/resource.tf index 06dfaa19e..1ec0edb61 100755 --- a/examples/resources/airbyte_source_zendesk_sunshine/resource.tf +++ b/examples/resources/airbyte_source_zendesk_sunshine/resource.tf @@ -4,14 +4,14 @@ resource "airbyte_source_zendesk_sunshine" "my_source_zendesksunshine" { source_zendesk_sunshine_authorization_method_api_token = { api_token = "...my_api_token..." 
auth_method = "api_token" - email = "Hollis.Mann72@hotmail.com" + email = "Leonor_Funk@hotmail.com" } } source_type = "zendesk-sunshine" start_date = "2021-01-01T00:00:00Z" subdomain = "...my_subdomain..." } - name = "Alexander Friesen" + name = "Mrs. Edith Hermiston" secret_id = "...my_secret_id..." - workspace_id = "82dbec75-c68c-4606-9946-8ce304d8849b" + workspace_id = "726d1532-1b83-42a5-ad69-180ff60eb9a6" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zendesk_support/resource.tf b/examples/resources/airbyte_source_zendesk_support/resource.tf index decdb6a4f..795dc3b22 100755 --- a/examples/resources/airbyte_source_zendesk_support/resource.tf +++ b/examples/resources/airbyte_source_zendesk_support/resource.tf @@ -4,15 +4,15 @@ resource "airbyte_source_zendesk_support" "my_source_zendesksupport" { source_zendesk_support_authentication_api_token = { api_token = "...my_api_token..." credentials = "api_token" - email = "Kacie27@hotmail.com" + email = "Ezequiel.Lindgren56@yahoo.com" } } - ignore_pagination = false + ignore_pagination = true source_type = "zendesk-support" start_date = "2020-10-15T00:00:00Z" subdomain = "...my_subdomain..." } - name = "May McClure" + name = "Alexander Friesen" secret_id = "...my_secret_id..." - workspace_id = "b0c69e37-2db1-4344-ba9f-78a5c0ed7aab" + workspace_id = "82dbec75-c68c-4606-9946-8ce304d8849b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zendesk_talk/resource.tf b/examples/resources/airbyte_source_zendesk_talk/resource.tf index 333c9f735..86c3f87c4 100755 --- a/examples/resources/airbyte_source_zendesk_talk/resource.tf +++ b/examples/resources/airbyte_source_zendesk_talk/resource.tf @@ -4,14 +4,14 @@ resource "airbyte_source_zendesk_talk" "my_source_zendesktalk" { source_zendesk_talk_authentication_api_token = { api_token = "...my_api_token..." auth_type = "api_token" - email = "Brant.Walter@yahoo.com" + email = "Kacie27@hotmail.com" } } source_type = "zendesk-talk" start_date = "2020-10-15T00:00:00Z" subdomain = "...my_subdomain..." } - name = "Dr. Lana Ritchie" + name = "Jackie Welch" secret_id = "...my_secret_id..." - workspace_id = "8d27b519-96b5-4b4b-90ee-f712b7a7ab03" + workspace_id = "bb0c69e3-72db-4134-8ba9-f78a5c0ed7aa" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zenloop/resource.tf b/examples/resources/airbyte_source_zenloop/resource.tf index 2edd03333..0f6d18eb5 100755 --- a/examples/resources/airbyte_source_zenloop/resource.tf +++ b/examples/resources/airbyte_source_zenloop/resource.tf @@ -6,7 +6,7 @@ resource "airbyte_source_zenloop" "my_source_zenloop" { survey_group_id = "...my_survey_group_id..." survey_id = "...my_survey_id..." } - name = "Ms. Emma Rodriguez Jr." + name = "Ricardo Champlin" secret_id = "...my_secret_id..." - workspace_id = "688deebe-f897-4f3d-90cc-d33f11b3e4e0" + workspace_id = "7261fb0c-58d2-47b5-9996-b5b4b50eef71" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zoho_crm/resource.tf b/examples/resources/airbyte_source_zoho_crm/resource.tf index 7a620c8b3..9aebdfb91 100755 --- a/examples/resources/airbyte_source_zoho_crm/resource.tf +++ b/examples/resources/airbyte_source_zoho_crm/resource.tf @@ -2,14 +2,14 @@ resource "airbyte_source_zoho_crm" "my_source_zohocrm" { configuration = { client_id = "...my_client_id..." client_secret = "...my_client_secret..." 
- dc_region = "IN" - edition = "Free" + dc_region = "US" + edition = "Enterprise" environment = "Developer" refresh_token = "...my_refresh_token..." source_type = "zoho-crm" start_datetime = "2000-01-01T13:00+00:00" } - name = "Alice Lind" + name = "Kenneth Fisher" secret_id = "...my_secret_id..." - workspace_id = "c759e02f-3702-4c5c-8e2d-30ead3104fa4" + workspace_id = "b1710688-deeb-4ef8-97f3-dd0ccd33f11b" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zoom/resource.tf b/examples/resources/airbyte_source_zoom/resource.tf index 96275739e..bfac92c45 100755 --- a/examples/resources/airbyte_source_zoom/resource.tf +++ b/examples/resources/airbyte_source_zoom/resource.tf @@ -3,7 +3,7 @@ resource "airbyte_source_zoom" "my_source_zoom" { jwt_token = "...my_jwt_token..." source_type = "zoom" } - name = "Charlene Abbott" + name = "Alexis Gutmann IV" secret_id = "...my_secret_id..." - workspace_id = "f375b442-8282-41fd-b2f6-9e59267c71cc" + workspace_id = "0aa10418-6ec7-459e-82f3-702c5c8e2d30" } \ No newline at end of file diff --git a/examples/resources/airbyte_source_zuora/resource.tf b/examples/resources/airbyte_source_zuora/resource.tf index b31bc08eb..d1e679297 100755 --- a/examples/resources/airbyte_source_zuora/resource.tf +++ b/examples/resources/airbyte_source_zuora/resource.tf @@ -5,10 +5,10 @@ resource "airbyte_source_zuora" "my_source_zuora" { data_query = "Unlimited" source_type = "zuora" start_date = "...my_start_date..." - tenant_endpoint = "EU API Sandbox" - window_in_days = "1" + tenant_endpoint = "US Performance Test" + window_in_days = "200" } - name = "Carroll Grant" + name = "Joan Bednar" secret_id = "...my_secret_id..." - workspace_id = "8d0358a8-2c80-48fe-a751-a2047c0449e1" + workspace_id = "a44707bf-375b-4442-8282-1fdb2f69e592" } \ No newline at end of file diff --git a/examples/resources/airbyte_workspace/resource.tf b/examples/resources/airbyte_workspace/resource.tf index ce429fdc7..e67208f35 100755 --- a/examples/resources/airbyte_workspace/resource.tf +++ b/examples/resources/airbyte_workspace/resource.tf @@ -1,3 +1,3 @@ resource "airbyte_workspace" "my_workspace" { - name = "Crystal Weissnat" + name = "Glenda Schiller DDS" } \ No newline at end of file diff --git a/files.gen b/files.gen index 28d184a66..d389f8a38 100755 --- a/files.gen +++ b/files.gen @@ -99,6 +99,16 @@ internal/provider/type_destination_langchain_indexing_pinecone.go internal/provider/type_destination_langchain_indexing.go internal/provider/type_destination_langchain_processing_config_model.go internal/provider/type_destination_langchain.go +internal/provider/type_destination_milvus_embedding_cohere.go +internal/provider/type_destination_milvus_embedding_from_field.go +internal/provider/type_destination_milvus_embedding.go +internal/provider/type_destination_milvus_indexing_authentication_api_token.go +internal/provider/type_destination_milvus_indexing_authentication_no_auth.go +internal/provider/type_destination_milvus_indexing_authentication_username_password.go +internal/provider/type_destination_milvus_indexing_authentication.go +internal/provider/type_destination_milvus_indexing.go +internal/provider/type_destination_milvus_processing_config_model.go +internal/provider/type_destination_milvus.go internal/provider/type_destination_mongodb_authorization_type_login_password.go internal/provider/type_destination_mongodb_authorization_type_none.go internal/provider/type_destination_mongodb_authorization_type.go @@ -117,6 +127,9 @@ 
internal/provider/type_destination_mysql_ssh_tunnel_method.go internal/provider/type_destination_mysql.go internal/provider/type_destination_oracle_ssh_tunnel_method.go internal/provider/type_destination_oracle.go +internal/provider/type_destination_pinecone_embedding.go +internal/provider/type_destination_pinecone_indexing.go +internal/provider/type_destination_pinecone.go internal/provider/type_destination_postgres_ssl_modes_allow.go internal/provider/type_destination_postgres_ssl_modes_disable.go internal/provider/type_destination_postgres_ssl_modes_prefer.go @@ -236,8 +249,6 @@ internal/provider/type_source_coinmarketcap.go internal/provider/type_source_configcat.go internal/provider/type_source_confluence.go internal/provider/type_source_convex.go -internal/provider/type_source_datadog_queries.go -internal/provider/type_source_datadog.go internal/provider/type_source_datascope.go internal/provider/type_source_delighted.go internal/provider/type_source_dixa.go @@ -302,6 +313,7 @@ internal/provider/type_source_google_pagespeed_insights.go internal/provider/type_source_google_search_console_authentication_type_o_auth.go internal/provider/type_source_google_search_console_authentication_type_service_account_key_authentication.go internal/provider/type_source_google_search_console_authentication_type.go +internal/provider/type_source_google_search_console_custom_report_config.go internal/provider/type_source_google_search_console.go internal/provider/type_source_google_sheets_authentication_authenticate_via_google_o_auth.go internal/provider/type_source_google_sheets_authentication_service_account_key_authentication.go @@ -376,15 +388,14 @@ internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atl internal/provider/type_source_mongodb_mongo_db_instance_type.go internal/provider/type_source_mongodb.go internal/provider/type_source_mongodb_internal_poc.go -internal/provider/type_source_mssql_replication_method_logical_replication_cdc.go -internal/provider/type_source_mssql_replication_method_standard.go -internal/provider/type_source_mssql_replication_method.go +internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go +internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go +internal/provider/type_source_mssql_update_method.go internal/provider/type_source_mssql_ssl_method.go internal/provider/type_source_mssql_ssh_tunnel_method.go internal/provider/type_source_mssql.go internal/provider/type_source_my_hours.go internal/provider/type_source_mysql_update_method_read_changes_using_binary_log_cdc.go -internal/provider/type_source_mysql_update_method_scan_changes_with_user_defined_cursor.go internal/provider/type_source_mysql_update_method.go internal/provider/type_source_mysql_ssl_modes_preferred.go internal/provider/type_source_mysql_ssl_modes_required.go @@ -405,7 +416,6 @@ internal/provider/type_source_okta.go internal/provider/type_source_omnisend.go internal/provider/type_source_onesignal_applications.go internal/provider/type_source_onesignal.go -internal/provider/type_source_openweather.go internal/provider/type_source_oracle_connect_by_service_name.go internal/provider/type_source_oracle_connect_by_system_idsid.go internal/provider/type_source_oracle_connect_by.go @@ -434,9 +444,11 @@ internal/provider/type_source_pipedrive.go internal/provider/type_source_pocket.go internal/provider/type_source_pokeapi.go internal/provider/type_source_polygon_stock_api.go 
-internal/provider/type_source_postgres_replication_method_logical_replication_cdc.go -internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc.go -internal/provider/type_source_postgres_replication_method.go +internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go +internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go +internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go +internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go +internal/provider/type_source_postgres_update_method.go internal/provider/type_source_postgres_ssl_modes_allow.go internal/provider/type_source_postgres_ssl_modes_disable.go internal/provider/type_source_postgres_ssl_modes_prefer.go @@ -455,7 +467,6 @@ internal/provider/type_source_postgres.go internal/provider/type_source_posthog.go internal/provider/type_source_postmarkapp.go internal/provider/type_source_prestashop.go -internal/provider/type_source_public_apis.go internal/provider/type_source_punk_api.go internal/provider/type_source_pypi.go internal/provider/type_source_qualaroo.go @@ -482,6 +493,18 @@ internal/provider/type_source_s3_file_format_jsonl.go internal/provider/type_source_s3_file_format_parquet.go internal/provider/type_source_s3_file_format.go internal/provider/type_source_s3_s3_amazon_web_services.go +internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go +internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go +internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go +internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go +internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go +internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go +internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go +internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go +internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go +internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go +internal/provider/type_source_s3_file_based_stream_config_format.go +internal/provider/type_source_s3_file_based_stream_config.go internal/provider/type_source_s3.go internal/provider/type_source_salesforce_streams_criteria.go internal/provider/type_source_salesforce.go @@ -568,8 +591,6 @@ internal/provider/type_source_zendesk_chat_authorization_method.go internal/provider/type_source_zendesk_chat.go internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go -internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token.go -internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth20.go internal/provider/type_source_zendesk_sunshine_authorization_method.go internal/provider/type_source_zendesk_sunshine.go internal/provider/type_source_zendesk_support_authentication_api_token.go @@ -628,9 +649,9 @@ internal/provider/type_source_mongodb_mongo_db_instance_type_mongo_db_atlas1.go 
internal/provider/type_source_mongodb_update_mongo_db_instance_type_mongo_db_atlas1.go internal/provider/type_source_mongodb_mongo_db_instance_type1.go internal/provider/type_source_mongodb1.go -internal/provider/type_source_postgres_replication_method_logical_replication_cdc1.go -internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc1.go -internal/provider/type_source_postgres_replication_method1.go +internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go +internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go +internal/provider/type_source_postgres_update_method1.go internal/provider/type_source_postgres_ssl_modes_allow1.go internal/provider/type_source_postgres_ssl_modes_disable1.go internal/provider/type_source_postgres_ssl_modes_prefer1.go @@ -653,12 +674,6 @@ internal/provider/type_source_retently_authentication_mechanism1.go internal/provider/type_source_retently1.go internal/provider/type_source_youtube_analytics_authenticate_via_o_auth201.go internal/provider/type_source_youtube_analytics1.go -internal/provider/type_source_zendesk_sunshine_authorization_method_api_token1.go -internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth201.go -internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token1.go -internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth201.go -internal/provider/type_source_zendesk_sunshine_authorization_method1.go -internal/provider/type_source_zendesk_sunshine1.go internal/provider/type_source_zendesk_support_authentication_api_token1.go internal/provider/type_source_zendesk_support_authentication_o_auth201.go internal/provider/type_source_zendesk_support_update_authentication_api_token1.go @@ -743,10 +758,12 @@ internal/sdk/pkg/models/operations/createdestinationgooglesheets.go internal/sdk/pkg/models/operations/createdestinationkeen.go internal/sdk/pkg/models/operations/createdestinationkinesis.go internal/sdk/pkg/models/operations/createdestinationlangchain.go +internal/sdk/pkg/models/operations/createdestinationmilvus.go internal/sdk/pkg/models/operations/createdestinationmongodb.go internal/sdk/pkg/models/operations/createdestinationmssql.go internal/sdk/pkg/models/operations/createdestinationmysql.go internal/sdk/pkg/models/operations/createdestinationoracle.go +internal/sdk/pkg/models/operations/createdestinationpinecone.go internal/sdk/pkg/models/operations/createdestinationpostgres.go internal/sdk/pkg/models/operations/createdestinationpubsub.go internal/sdk/pkg/models/operations/createdestinationredis.go @@ -779,10 +796,12 @@ internal/sdk/pkg/models/operations/deletedestinationgooglesheets.go internal/sdk/pkg/models/operations/deletedestinationkeen.go internal/sdk/pkg/models/operations/deletedestinationkinesis.go internal/sdk/pkg/models/operations/deletedestinationlangchain.go +internal/sdk/pkg/models/operations/deletedestinationmilvus.go internal/sdk/pkg/models/operations/deletedestinationmongodb.go internal/sdk/pkg/models/operations/deletedestinationmssql.go internal/sdk/pkg/models/operations/deletedestinationmysql.go internal/sdk/pkg/models/operations/deletedestinationoracle.go +internal/sdk/pkg/models/operations/deletedestinationpinecone.go internal/sdk/pkg/models/operations/deletedestinationpostgres.go internal/sdk/pkg/models/operations/deletedestinationpubsub.go internal/sdk/pkg/models/operations/deletedestinationredis.go @@ -815,10 +834,12 @@ 
internal/sdk/pkg/models/operations/getdestinationgooglesheets.go internal/sdk/pkg/models/operations/getdestinationkeen.go internal/sdk/pkg/models/operations/getdestinationkinesis.go internal/sdk/pkg/models/operations/getdestinationlangchain.go +internal/sdk/pkg/models/operations/getdestinationmilvus.go internal/sdk/pkg/models/operations/getdestinationmongodb.go internal/sdk/pkg/models/operations/getdestinationmssql.go internal/sdk/pkg/models/operations/getdestinationmysql.go internal/sdk/pkg/models/operations/getdestinationoracle.go +internal/sdk/pkg/models/operations/getdestinationpinecone.go internal/sdk/pkg/models/operations/getdestinationpostgres.go internal/sdk/pkg/models/operations/getdestinationpubsub.go internal/sdk/pkg/models/operations/getdestinationredis.go @@ -853,10 +874,12 @@ internal/sdk/pkg/models/operations/putdestinationgooglesheets.go internal/sdk/pkg/models/operations/putdestinationkeen.go internal/sdk/pkg/models/operations/putdestinationkinesis.go internal/sdk/pkg/models/operations/putdestinationlangchain.go +internal/sdk/pkg/models/operations/putdestinationmilvus.go internal/sdk/pkg/models/operations/putdestinationmongodb.go internal/sdk/pkg/models/operations/putdestinationmssql.go internal/sdk/pkg/models/operations/putdestinationmysql.go internal/sdk/pkg/models/operations/putdestinationoracle.go +internal/sdk/pkg/models/operations/putdestinationpinecone.go internal/sdk/pkg/models/operations/putdestinationpostgres.go internal/sdk/pkg/models/operations/putdestinationpubsub.go internal/sdk/pkg/models/operations/putdestinationredis.go @@ -907,7 +930,6 @@ internal/sdk/pkg/models/operations/createsourcecoinmarketcap.go internal/sdk/pkg/models/operations/createsourceconfigcat.go internal/sdk/pkg/models/operations/createsourceconfluence.go internal/sdk/pkg/models/operations/createsourceconvex.go -internal/sdk/pkg/models/operations/createsourcedatadog.go internal/sdk/pkg/models/operations/createsourcedatascope.go internal/sdk/pkg/models/operations/createsourcedelighted.go internal/sdk/pkg/models/operations/createsourcedixa.go @@ -985,7 +1007,6 @@ internal/sdk/pkg/models/operations/createsourcenytimes.go internal/sdk/pkg/models/operations/createsourceokta.go internal/sdk/pkg/models/operations/createsourceomnisend.go internal/sdk/pkg/models/operations/createsourceonesignal.go -internal/sdk/pkg/models/operations/createsourceopenweather.go internal/sdk/pkg/models/operations/createsourceoracle.go internal/sdk/pkg/models/operations/createsourceorb.go internal/sdk/pkg/models/operations/createsourceorbit.go @@ -1005,7 +1026,6 @@ internal/sdk/pkg/models/operations/createsourcepostgres.go internal/sdk/pkg/models/operations/createsourceposthog.go internal/sdk/pkg/models/operations/createsourcepostmarkapp.go internal/sdk/pkg/models/operations/createsourceprestashop.go -internal/sdk/pkg/models/operations/createsourcepublicapis.go internal/sdk/pkg/models/operations/createsourcepunkapi.go internal/sdk/pkg/models/operations/createsourcepypi.go internal/sdk/pkg/models/operations/createsourcequalaroo.go @@ -1110,7 +1130,6 @@ internal/sdk/pkg/models/operations/deletesourcecoinmarketcap.go internal/sdk/pkg/models/operations/deletesourceconfigcat.go internal/sdk/pkg/models/operations/deletesourceconfluence.go internal/sdk/pkg/models/operations/deletesourceconvex.go -internal/sdk/pkg/models/operations/deletesourcedatadog.go internal/sdk/pkg/models/operations/deletesourcedatascope.go internal/sdk/pkg/models/operations/deletesourcedelighted.go internal/sdk/pkg/models/operations/deletesourcedixa.go 
@@ -1188,7 +1207,6 @@ internal/sdk/pkg/models/operations/deletesourcenytimes.go internal/sdk/pkg/models/operations/deletesourceokta.go internal/sdk/pkg/models/operations/deletesourceomnisend.go internal/sdk/pkg/models/operations/deletesourceonesignal.go -internal/sdk/pkg/models/operations/deletesourceopenweather.go internal/sdk/pkg/models/operations/deletesourceoracle.go internal/sdk/pkg/models/operations/deletesourceorb.go internal/sdk/pkg/models/operations/deletesourceorbit.go @@ -1208,7 +1226,6 @@ internal/sdk/pkg/models/operations/deletesourcepostgres.go internal/sdk/pkg/models/operations/deletesourceposthog.go internal/sdk/pkg/models/operations/deletesourcepostmarkapp.go internal/sdk/pkg/models/operations/deletesourceprestashop.go -internal/sdk/pkg/models/operations/deletesourcepublicapis.go internal/sdk/pkg/models/operations/deletesourcepunkapi.go internal/sdk/pkg/models/operations/deletesourcepypi.go internal/sdk/pkg/models/operations/deletesourcequalaroo.go @@ -1313,7 +1330,6 @@ internal/sdk/pkg/models/operations/getsourcecoinmarketcap.go internal/sdk/pkg/models/operations/getsourceconfigcat.go internal/sdk/pkg/models/operations/getsourceconfluence.go internal/sdk/pkg/models/operations/getsourceconvex.go -internal/sdk/pkg/models/operations/getsourcedatadog.go internal/sdk/pkg/models/operations/getsourcedatascope.go internal/sdk/pkg/models/operations/getsourcedelighted.go internal/sdk/pkg/models/operations/getsourcedixa.go @@ -1391,7 +1407,6 @@ internal/sdk/pkg/models/operations/getsourcenytimes.go internal/sdk/pkg/models/operations/getsourceokta.go internal/sdk/pkg/models/operations/getsourceomnisend.go internal/sdk/pkg/models/operations/getsourceonesignal.go -internal/sdk/pkg/models/operations/getsourceopenweather.go internal/sdk/pkg/models/operations/getsourceoracle.go internal/sdk/pkg/models/operations/getsourceorb.go internal/sdk/pkg/models/operations/getsourceorbit.go @@ -1411,7 +1426,6 @@ internal/sdk/pkg/models/operations/getsourcepostgres.go internal/sdk/pkg/models/operations/getsourceposthog.go internal/sdk/pkg/models/operations/getsourcepostmarkapp.go internal/sdk/pkg/models/operations/getsourceprestashop.go -internal/sdk/pkg/models/operations/getsourcepublicapis.go internal/sdk/pkg/models/operations/getsourcepunkapi.go internal/sdk/pkg/models/operations/getsourcepypi.go internal/sdk/pkg/models/operations/getsourcequalaroo.go @@ -1519,7 +1533,6 @@ internal/sdk/pkg/models/operations/putsourcecoinmarketcap.go internal/sdk/pkg/models/operations/putsourceconfigcat.go internal/sdk/pkg/models/operations/putsourceconfluence.go internal/sdk/pkg/models/operations/putsourceconvex.go -internal/sdk/pkg/models/operations/putsourcedatadog.go internal/sdk/pkg/models/operations/putsourcedatascope.go internal/sdk/pkg/models/operations/putsourcedelighted.go internal/sdk/pkg/models/operations/putsourcedixa.go @@ -1597,7 +1610,6 @@ internal/sdk/pkg/models/operations/putsourcenytimes.go internal/sdk/pkg/models/operations/putsourceokta.go internal/sdk/pkg/models/operations/putsourceomnisend.go internal/sdk/pkg/models/operations/putsourceonesignal.go -internal/sdk/pkg/models/operations/putsourceopenweather.go internal/sdk/pkg/models/operations/putsourceoracle.go internal/sdk/pkg/models/operations/putsourceorb.go internal/sdk/pkg/models/operations/putsourceorbit.go @@ -1617,7 +1629,6 @@ internal/sdk/pkg/models/operations/putsourcepostgres.go internal/sdk/pkg/models/operations/putsourceposthog.go internal/sdk/pkg/models/operations/putsourcepostmarkapp.go 
internal/sdk/pkg/models/operations/putsourceprestashop.go -internal/sdk/pkg/models/operations/putsourcepublicapis.go internal/sdk/pkg/models/operations/putsourcepunkapi.go internal/sdk/pkg/models/operations/putsourcepypi.go internal/sdk/pkg/models/operations/putsourcequalaroo.go @@ -1753,6 +1764,8 @@ internal/sdk/pkg/models/shared/destinationkinesiscreaterequest.go internal/sdk/pkg/models/shared/destinationkinesis.go internal/sdk/pkg/models/shared/destinationlangchaincreaterequest.go internal/sdk/pkg/models/shared/destinationlangchain.go +internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go +internal/sdk/pkg/models/shared/destinationmilvus.go internal/sdk/pkg/models/shared/destinationmongodbcreaterequest.go internal/sdk/pkg/models/shared/destinationmongodb.go internal/sdk/pkg/models/shared/destinationmssqlcreaterequest.go @@ -1761,6 +1774,8 @@ internal/sdk/pkg/models/shared/destinationmysqlcreaterequest.go internal/sdk/pkg/models/shared/destinationmysql.go internal/sdk/pkg/models/shared/destinationoraclecreaterequest.go internal/sdk/pkg/models/shared/destinationoracle.go +internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go +internal/sdk/pkg/models/shared/destinationpinecone.go internal/sdk/pkg/models/shared/destinationpostgrescreaterequest.go internal/sdk/pkg/models/shared/destinationpostgres.go internal/sdk/pkg/models/shared/destinationpubsubcreaterequest.go @@ -1826,6 +1841,8 @@ internal/sdk/pkg/models/shared/destinationkinesisputrequest.go internal/sdk/pkg/models/shared/destinationkinesisupdate.go internal/sdk/pkg/models/shared/destinationlangchainputrequest.go internal/sdk/pkg/models/shared/destinationlangchainupdate.go +internal/sdk/pkg/models/shared/destinationmilvusputrequest.go +internal/sdk/pkg/models/shared/destinationmilvusupdate.go internal/sdk/pkg/models/shared/destinationmongodbputrequest.go internal/sdk/pkg/models/shared/destinationmongodbupdate.go internal/sdk/pkg/models/shared/destinationmssqlputrequest.go @@ -1834,6 +1851,8 @@ internal/sdk/pkg/models/shared/destinationmysqlputrequest.go internal/sdk/pkg/models/shared/destinationmysqlupdate.go internal/sdk/pkg/models/shared/destinationoracleputrequest.go internal/sdk/pkg/models/shared/destinationoracleupdate.go +internal/sdk/pkg/models/shared/destinationpineconeputrequest.go +internal/sdk/pkg/models/shared/destinationpineconeupdate.go internal/sdk/pkg/models/shared/destinationpostgresputrequest.go internal/sdk/pkg/models/shared/destinationpostgresupdate.go internal/sdk/pkg/models/shared/destinationpubsubputrequest.go @@ -1931,8 +1950,6 @@ internal/sdk/pkg/models/shared/sourceconfluencecreaterequest.go internal/sdk/pkg/models/shared/sourceconfluence.go internal/sdk/pkg/models/shared/sourceconvexcreaterequest.go internal/sdk/pkg/models/shared/sourceconvex.go -internal/sdk/pkg/models/shared/sourcedatadogcreaterequest.go -internal/sdk/pkg/models/shared/sourcedatadog.go internal/sdk/pkg/models/shared/sourcedatascopecreaterequest.go internal/sdk/pkg/models/shared/sourcedatascope.go internal/sdk/pkg/models/shared/sourcedelightedcreaterequest.go @@ -2087,8 +2104,6 @@ internal/sdk/pkg/models/shared/sourceomnisendcreaterequest.go internal/sdk/pkg/models/shared/sourceomnisend.go internal/sdk/pkg/models/shared/sourceonesignalcreaterequest.go internal/sdk/pkg/models/shared/sourceonesignal.go -internal/sdk/pkg/models/shared/sourceopenweathercreaterequest.go -internal/sdk/pkg/models/shared/sourceopenweather.go internal/sdk/pkg/models/shared/sourceoraclecreaterequest.go 
internal/sdk/pkg/models/shared/sourceoracle.go internal/sdk/pkg/models/shared/sourceorbcreaterequest.go @@ -2127,8 +2142,6 @@ internal/sdk/pkg/models/shared/sourcepostmarkappcreaterequest.go internal/sdk/pkg/models/shared/sourcepostmarkapp.go internal/sdk/pkg/models/shared/sourceprestashopcreaterequest.go internal/sdk/pkg/models/shared/sourceprestashop.go -internal/sdk/pkg/models/shared/sourcepublicapiscreaterequest.go -internal/sdk/pkg/models/shared/sourcepublicapis.go internal/sdk/pkg/models/shared/sourcepunkapicreaterequest.go internal/sdk/pkg/models/shared/sourcepunkapi.go internal/sdk/pkg/models/shared/sourcepypicreaterequest.go @@ -2340,8 +2353,6 @@ internal/sdk/pkg/models/shared/sourceconfluenceputrequest.go internal/sdk/pkg/models/shared/sourceconfluenceupdate.go internal/sdk/pkg/models/shared/sourceconvexputrequest.go internal/sdk/pkg/models/shared/sourceconvexupdate.go -internal/sdk/pkg/models/shared/sourcedatadogputrequest.go -internal/sdk/pkg/models/shared/sourcedatadogupdate.go internal/sdk/pkg/models/shared/sourcedatascopeputrequest.go internal/sdk/pkg/models/shared/sourcedatascopeupdate.go internal/sdk/pkg/models/shared/sourcedelightedputrequest.go @@ -2496,8 +2507,6 @@ internal/sdk/pkg/models/shared/sourceomnisendputrequest.go internal/sdk/pkg/models/shared/sourceomnisendupdate.go internal/sdk/pkg/models/shared/sourceonesignalputrequest.go internal/sdk/pkg/models/shared/sourceonesignalupdate.go -internal/sdk/pkg/models/shared/sourceopenweatherputrequest.go -internal/sdk/pkg/models/shared/sourceopenweatherupdate.go internal/sdk/pkg/models/shared/sourceoracleputrequest.go internal/sdk/pkg/models/shared/sourceoracleupdate.go internal/sdk/pkg/models/shared/sourceorbputrequest.go @@ -2536,8 +2545,6 @@ internal/sdk/pkg/models/shared/sourcepostmarkappputrequest.go internal/sdk/pkg/models/shared/sourcepostmarkappupdate.go internal/sdk/pkg/models/shared/sourceprestashopputrequest.go internal/sdk/pkg/models/shared/sourceprestashopupdate.go -internal/sdk/pkg/models/shared/sourcepublicapisputrequest.go -internal/sdk/pkg/models/shared/sourcepublicapisupdate.go internal/sdk/pkg/models/shared/sourcepunkapiputrequest.go internal/sdk/pkg/models/shared/sourcepunkapiupdate.go internal/sdk/pkg/models/shared/sourcepypiputrequest.go @@ -2688,8 +2695,6 @@ internal/sdk/pkg/models/shared/workspacecreaterequest.go internal/sdk/pkg/models/shared/workspacesresponse.go internal/sdk/pkg/models/shared/workspaceupdaterequest.go internal/sdk/pkg/models/shared/security.go -internal/sdk/pkg/models/shared/connectionresponseroottypeforconnectionresponse.go -internal/sdk/pkg/models/shared/workspaceresponseroottypeforworkspaceresponse.go USAGE.md internal/provider/provider.go examples/provider/provider.tf @@ -2753,6 +2758,9 @@ examples/resources/airbyte_destination_kinesis/resource.tf internal/provider/destination_langchain_resource.go internal/provider/destination_langchain_resource_sdk.go examples/resources/airbyte_destination_langchain/resource.tf +internal/provider/destination_milvus_resource.go +internal/provider/destination_milvus_resource_sdk.go +examples/resources/airbyte_destination_milvus/resource.tf internal/provider/destination_mongodb_resource.go internal/provider/destination_mongodb_resource_sdk.go examples/resources/airbyte_destination_mongodb/resource.tf @@ -2765,6 +2773,9 @@ examples/resources/airbyte_destination_mysql/resource.tf internal/provider/destination_oracle_resource.go internal/provider/destination_oracle_resource_sdk.go examples/resources/airbyte_destination_oracle/resource.tf 
+internal/provider/destination_pinecone_resource.go +internal/provider/destination_pinecone_resource_sdk.go +examples/resources/airbyte_destination_pinecone/resource.tf internal/provider/destination_postgres_resource.go internal/provider/destination_postgres_resource_sdk.go examples/resources/airbyte_destination_postgres/resource.tf @@ -2900,9 +2911,6 @@ examples/resources/airbyte_source_confluence/resource.tf internal/provider/source_convex_resource.go internal/provider/source_convex_resource_sdk.go examples/resources/airbyte_source_convex/resource.tf -internal/provider/source_datadog_resource.go -internal/provider/source_datadog_resource_sdk.go -examples/resources/airbyte_source_datadog/resource.tf internal/provider/source_datascope_resource.go internal/provider/source_datascope_resource_sdk.go examples/resources/airbyte_source_datascope/resource.tf @@ -3134,9 +3142,6 @@ examples/resources/airbyte_source_omnisend/resource.tf internal/provider/source_onesignal_resource.go internal/provider/source_onesignal_resource_sdk.go examples/resources/airbyte_source_onesignal/resource.tf -internal/provider/source_openweather_resource.go -internal/provider/source_openweather_resource_sdk.go -examples/resources/airbyte_source_openweather/resource.tf internal/provider/source_oracle_resource.go internal/provider/source_oracle_resource_sdk.go examples/resources/airbyte_source_oracle/resource.tf @@ -3194,9 +3199,6 @@ examples/resources/airbyte_source_postmarkapp/resource.tf internal/provider/source_prestashop_resource.go internal/provider/source_prestashop_resource_sdk.go examples/resources/airbyte_source_prestashop/resource.tf -internal/provider/source_publicapis_resource.go -internal/provider/source_publicapis_resource_sdk.go -examples/resources/airbyte_source_public_apis/resource.tf internal/provider/source_punkapi_resource.go internal/provider/source_punkapi_resource_sdk.go examples/resources/airbyte_source_punk_api/resource.tf @@ -3470,6 +3472,9 @@ examples/data-sources/airbyte_destination_kinesis/data-source.tf internal/provider/destination_langchain_data_source.go internal/provider/destination_langchain_data_source_sdk.go examples/data-sources/airbyte_destination_langchain/data-source.tf +internal/provider/destination_milvus_data_source.go +internal/provider/destination_milvus_data_source_sdk.go +examples/data-sources/airbyte_destination_milvus/data-source.tf internal/provider/destination_mongodb_data_source.go internal/provider/destination_mongodb_data_source_sdk.go examples/data-sources/airbyte_destination_mongodb/data-source.tf @@ -3482,6 +3487,9 @@ examples/data-sources/airbyte_destination_mysql/data-source.tf internal/provider/destination_oracle_data_source.go internal/provider/destination_oracle_data_source_sdk.go examples/data-sources/airbyte_destination_oracle/data-source.tf +internal/provider/destination_pinecone_data_source.go +internal/provider/destination_pinecone_data_source_sdk.go +examples/data-sources/airbyte_destination_pinecone/data-source.tf internal/provider/destination_postgres_data_source.go internal/provider/destination_postgres_data_source_sdk.go examples/data-sources/airbyte_destination_postgres/data-source.tf @@ -3617,9 +3625,6 @@ examples/data-sources/airbyte_source_confluence/data-source.tf internal/provider/source_convex_data_source.go internal/provider/source_convex_data_source_sdk.go examples/data-sources/airbyte_source_convex/data-source.tf -internal/provider/source_datadog_data_source.go -internal/provider/source_datadog_data_source_sdk.go 
-examples/data-sources/airbyte_source_datadog/data-source.tf internal/provider/source_datascope_data_source.go internal/provider/source_datascope_data_source_sdk.go examples/data-sources/airbyte_source_datascope/data-source.tf @@ -3851,9 +3856,6 @@ examples/data-sources/airbyte_source_omnisend/data-source.tf internal/provider/source_onesignal_data_source.go internal/provider/source_onesignal_data_source_sdk.go examples/data-sources/airbyte_source_onesignal/data-source.tf -internal/provider/source_openweather_data_source.go -internal/provider/source_openweather_data_source_sdk.go -examples/data-sources/airbyte_source_openweather/data-source.tf internal/provider/source_oracle_data_source.go internal/provider/source_oracle_data_source_sdk.go examples/data-sources/airbyte_source_oracle/data-source.tf @@ -3911,9 +3913,6 @@ examples/data-sources/airbyte_source_postmarkapp/data-source.tf internal/provider/source_prestashop_data_source.go internal/provider/source_prestashop_data_source_sdk.go examples/data-sources/airbyte_source_prestashop/data-source.tf -internal/provider/source_publicapis_data_source.go -internal/provider/source_publicapis_data_source_sdk.go -examples/data-sources/airbyte_source_public_apis/data-source.tf internal/provider/source_punkapi_data_source.go internal/provider/source_punkapi_data_source_sdk.go examples/data-sources/airbyte_source_punk_api/data-source.tf @@ -4126,4 +4125,5 @@ internal/provider/source_zuora_data_source_sdk.go examples/data-sources/airbyte_source_zuora/data-source.tf internal/provider/workspace_data_source.go internal/provider/workspace_data_source_sdk.go -examples/data-sources/airbyte_workspace/data-source.tf \ No newline at end of file +examples/data-sources/airbyte_workspace/data-source.tf +.gitattributes \ No newline at end of file diff --git a/gen.yaml b/gen.yaml index 442239b12..72bf23f81 100755 --- a/gen.yaml +++ b/gen.yaml @@ -5,11 +5,11 @@ generation: telemetryEnabled: true features: terraform: - core: 2.83.3 + core: 2.88.0 globalSecurity: 2.81.1 globalServerURLs: 2.82.0 includes: 2.81.1 terraform: - version: 0.3.3 + version: 0.3.4 author: airbytehq packageName: airbyte diff --git a/internal/provider/destination_bigquery_data_source.go b/internal/provider/destination_bigquery_data_source.go index 82dcdeb48..3a009203a 100755 --- a/internal/provider/destination_bigquery_data_source.go +++ b/internal/provider/destination_bigquery_data_source.go @@ -307,7 +307,7 @@ func (r *DestinationBigqueryDataSource) Schema(ctx context.Context, req datasour }, "raw_data_dataset": schema.StringAttribute{ Computed: true, - Description: `(Early Access) The dataset to write raw tables into`, + Description: `The dataset to write raw tables into`, }, "transformation_priority": schema.StringAttribute{ Computed: true, @@ -320,10 +320,6 @@ func (r *DestinationBigqueryDataSource) Schema(ctx context.Context, req datasour MarkdownDescription: `must be one of ["interactive", "batch"]` + "\n" + `Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. 
The default "interactive" value is used if not set explicitly.`, }, - "use_1s1t_format": schema.BoolAttribute{ - Computed: true, - Description: `(Early Access) Use Destinations V2.`, - }, }, }, "destination_id": schema.StringAttribute{ diff --git a/internal/provider/destination_bigquery_resource.go b/internal/provider/destination_bigquery_resource.go index c31b8890c..dac4e5752 100755 --- a/internal/provider/destination_bigquery_resource.go +++ b/internal/provider/destination_bigquery_resource.go @@ -309,7 +309,7 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S }, "raw_data_dataset": schema.StringAttribute{ Optional: true, - Description: `(Early Access) The dataset to write raw tables into`, + Description: `The dataset to write raw tables into`, }, "transformation_priority": schema.StringAttribute{ Optional: true, @@ -322,10 +322,6 @@ func (r *DestinationBigqueryResource) Schema(ctx context.Context, req resource.S MarkdownDescription: `must be one of ["interactive", "batch"]` + "\n" + `Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly.`, }, - "use_1s1t_format": schema.BoolAttribute{ - Optional: true, - Description: `(Early Access) Use Destinations V2.`, - }, }, }, "destination_id": schema.StringAttribute{ diff --git a/internal/provider/destination_bigquery_resource_sdk.go b/internal/provider/destination_bigquery_resource_sdk.go index 3fb5ec5cd..94e19ecd3 100755 --- a/internal/provider/destination_bigquery_resource_sdk.go +++ b/internal/provider/destination_bigquery_resource_sdk.go @@ -99,12 +99,6 @@ func (r *DestinationBigqueryResourceModel) ToCreateSDKType() *shared.Destination } else { transformationPriority = nil } - use1s1tFormat := new(bool) - if !r.Configuration.Use1s1tFormat.IsUnknown() && !r.Configuration.Use1s1tFormat.IsNull() { - *use1s1tFormat = r.Configuration.Use1s1tFormat.ValueBool() - } else { - use1s1tFormat = nil - } configuration := shared.DestinationBigquery{ BigQueryClientBufferSizeMb: bigQueryClientBufferSizeMb, CredentialsJSON: credentialsJSON, @@ -115,7 +109,6 @@ func (r *DestinationBigqueryResourceModel) ToCreateSDKType() *shared.Destination ProjectID: projectID, RawDataDataset: rawDataDataset, TransformationPriority: transformationPriority, - Use1s1tFormat: use1s1tFormat, } name := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() @@ -223,12 +216,6 @@ func (r *DestinationBigqueryResourceModel) ToUpdateSDKType() *shared.Destination } else { transformationPriority = nil } - use1s1tFormat := new(bool) - if !r.Configuration.Use1s1tFormat.IsUnknown() && !r.Configuration.Use1s1tFormat.IsNull() { - *use1s1tFormat = r.Configuration.Use1s1tFormat.ValueBool() - } else { - use1s1tFormat = nil - } configuration := shared.DestinationBigqueryUpdate{ BigQueryClientBufferSizeMb: bigQueryClientBufferSizeMb, CredentialsJSON: credentialsJSON, @@ -238,7 +225,6 @@ func (r *DestinationBigqueryResourceModel) ToUpdateSDKType() *shared.Destination ProjectID: projectID, RawDataDataset: rawDataDataset, TransformationPriority: transformationPriority, - Use1s1tFormat: use1s1tFormat, } name := 
r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() diff --git a/internal/provider/destination_milvus_data_source.go b/internal/provider/destination_milvus_data_source.go new file mode 100755 index 000000000..f78664b73 --- /dev/null +++ b/internal/provider/destination_milvus_data_source.go @@ -0,0 +1,474 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import ( + "airbyte/internal/sdk" + "airbyte/internal/sdk/pkg/models/operations" + "context" + "fmt" + + "airbyte/internal/validators" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// Ensure provider defined types fully satisfy framework interfaces. +var _ datasource.DataSource = &DestinationMilvusDataSource{} +var _ datasource.DataSourceWithConfigure = &DestinationMilvusDataSource{} + +func NewDestinationMilvusDataSource() datasource.DataSource { + return &DestinationMilvusDataSource{} +} + +// DestinationMilvusDataSource is the data source implementation. +type DestinationMilvusDataSource struct { + client *sdk.SDK +} + +// DestinationMilvusDataSourceModel describes the data model. +type DestinationMilvusDataSourceModel struct { + Configuration DestinationMilvus `tfsdk:"configuration"` + DestinationID types.String `tfsdk:"destination_id"` + Name types.String `tfsdk:"name"` + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +// Metadata returns the data source type name. +func (r *DestinationMilvusDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_destination_milvus" +} + +// Schema defines the schema for the data source. +func (r *DestinationMilvusDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "DestinationMilvus DataSource", + + Attributes: map[string]schema.Attribute{ + "configuration": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "destination_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "milvus", + ), + }, + Description: `must be one of ["milvus"]`, + }, + "embedding": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "destination_milvus_embedding_cohere": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Computed: true, + }, + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_milvus_embedding_fake": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. 
This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_milvus_embedding_from_field": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "dimensions": schema.Int64Attribute{ + Computed: true, + Description: `The number of dimensions the embedding model is generating`, + }, + "field_name": schema.StringAttribute{ + Computed: true, + Description: `Name of the field in the record that contains the embedding`, + }, + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "from_field", + ), + }, + Description: `must be one of ["from_field"]`, + }, + }, + Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`, + }, + "destination_milvus_embedding_open_ai": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Computed: true, + }, + }, + Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + "destination_milvus_update_embedding_cohere": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Computed: true, + }, + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_milvus_update_embedding_fake": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_milvus_update_embedding_from_field": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "dimensions": schema.Int64Attribute{ + Computed: true, + Description: `The number of dimensions the embedding model is generating`, + }, + "field_name": schema.StringAttribute{ + Computed: true, + Description: `Name of the field in the record that contains the embedding`, + }, + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "from_field", + ), + }, + Description: `must be one of ["from_field"]`, + }, + }, + Description: `Use a field in the record as the embedding. 
This is useful if you already have an embedding for your data and want to store it in the vector store.`, + }, + "destination_milvus_update_embedding_open_ai": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Computed: true, + }, + }, + Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `Embedding configuration`, + }, + "indexing": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "auth": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "destination_milvus_indexing_authentication_api_token": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "token", + ), + }, + Description: `must be one of ["token"]`, + }, + "token": schema.StringAttribute{ + Computed: true, + Description: `API Token for the Milvus instance`, + }, + }, + Description: `Authenticate using an API token (suitable for Zilliz Cloud)`, + }, + "destination_milvus_indexing_authentication_no_auth": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "no_auth", + ), + }, + Description: `must be one of ["no_auth"]`, + }, + }, + Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`, + }, + "destination_milvus_indexing_authentication_username_password": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "username_password", + ), + }, + Description: `must be one of ["username_password"]`, + }, + "password": schema.StringAttribute{ + Computed: true, + Description: `Password for the Milvus instance`, + }, + "username": schema.StringAttribute{ + Computed: true, + Description: `Username for the Milvus instance`, + }, + }, + Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`, + }, + "destination_milvus_update_indexing_authentication_api_token": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "token", + ), + }, + Description: `must be one of ["token"]`, + }, + "token": schema.StringAttribute{ + Computed: true, + Description: `API Token for the Milvus instance`, + }, + }, + Description: `Authenticate using an API token (suitable for Zilliz Cloud)`, + }, + "destination_milvus_update_indexing_authentication_no_auth": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "no_auth", + ), + }, + Description: `must be one of ["no_auth"]`, + }, + }, + 
Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`, + }, + "destination_milvus_update_indexing_authentication_username_password": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "username_password", + ), + }, + Description: `must be one of ["username_password"]`, + }, + "password": schema.StringAttribute{ + Computed: true, + Description: `Password for the Milvus instance`, + }, + "username": schema.StringAttribute{ + Computed: true, + Description: `Username for the Milvus instance`, + }, + }, + Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `Authentication method`, + }, + "collection": schema.StringAttribute{ + Computed: true, + Description: `The collection to load data into`, + }, + "db": schema.StringAttribute{ + Computed: true, + Description: `The database to connect to`, + }, + "host": schema.StringAttribute{ + Computed: true, + Description: `The public endpoint of the Milvus instance. `, + }, + "text_field": schema.StringAttribute{ + Computed: true, + Description: `The field in the entity that contains the embedded text`, + }, + "vector_field": schema.StringAttribute{ + Computed: true, + Description: `The field in the entity that contains the vector`, + }, + }, + Description: `Indexing configuration`, + }, + "processing": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "chunk_overlap": schema.Int64Attribute{ + Computed: true, + Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`, + }, + "chunk_size": schema.Int64Attribute{ + Computed: true, + Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`, + }, + "metadata_fields": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`, + }, + "text_fields": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. 
It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`, + }, + }, + }, + }, + }, + "destination_id": schema.StringAttribute{ + Required: true, + }, + "name": schema.StringAttribute{ + Computed: true, + }, + "workspace_id": schema.StringAttribute{ + Computed: true, + }, + }, + } +} + +func (r *DestinationMilvusDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*sdk.SDK) + + if !ok { + resp.Diagnostics.AddError( + "Unexpected DataSource Configure Type", + fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + + return + } + + r.client = client +} + +func (r *DestinationMilvusDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data *DestinationMilvusDataSourceModel + var item types.Object + + resp.Diagnostics.Append(req.Config.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + + if resp.Diagnostics.HasError() { + return + } + + destinationID := data.DestinationID.ValueString() + request := operations.GetDestinationMilvusRequest{ + DestinationID: destinationID, + } + res, err := r.client.Destinations.GetDestinationMilvus(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if res.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + if res.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) + return + } + data.RefreshFromGetResponse(res.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} diff --git a/internal/provider/source_datadog_data_source_sdk.go b/internal/provider/destination_milvus_data_source_sdk.go similarity index 65% rename from internal/provider/source_datadog_data_source_sdk.go rename to internal/provider/destination_milvus_data_source_sdk.go index 10cf5724e..9ef8f76e3 100755 --- a/internal/provider/source_datadog_data_source_sdk.go +++ b/internal/provider/destination_milvus_data_source_sdk.go @@ -7,8 +7,8 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) -func (r *SourceDatadogDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) { +func (r *DestinationMilvusDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) { + r.DestinationID = types.StringValue(resp.DestinationID) r.Name = types.StringValue(resp.Name) - r.SourceID = types.StringValue(resp.SourceID) r.WorkspaceID = types.StringValue(resp.WorkspaceID) } diff --git a/internal/provider/destination_milvus_resource.go b/internal/provider/destination_milvus_resource.go new file mode 100755 index 000000000..97b2461aa --- /dev/null +++ b/internal/provider/destination_milvus_resource.go @@ -0,0 +1,640 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import ( + "airbyte/internal/sdk" + "context" + "fmt" + + speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier" + "airbyte/internal/sdk/pkg/models/operations" + "airbyte/internal/validators" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// Ensure provider defined types fully satisfy framework interfaces. +var _ resource.Resource = &DestinationMilvusResource{} +var _ resource.ResourceWithImportState = &DestinationMilvusResource{} + +func NewDestinationMilvusResource() resource.Resource { + return &DestinationMilvusResource{} +} + +// DestinationMilvusResource defines the resource implementation. +type DestinationMilvusResource struct { + client *sdk.SDK +} + +// DestinationMilvusResourceModel describes the resource data model. 
+type DestinationMilvusResourceModel struct { + Configuration DestinationMilvus `tfsdk:"configuration"` + DestinationID types.String `tfsdk:"destination_id"` + DestinationType types.String `tfsdk:"destination_type"` + Name types.String `tfsdk:"name"` + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +func (r *DestinationMilvusResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_destination_milvus" +} + +func (r *DestinationMilvusResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "DestinationMilvus Resource", + + Attributes: map[string]schema.Attribute{ + "configuration": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "destination_type": schema.StringAttribute{ + Required: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "milvus", + ), + }, + Description: `must be one of ["milvus"]`, + }, + "embedding": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "destination_milvus_embedding_cohere": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Required: true, + }, + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_milvus_embedding_fake": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_milvus_embedding_from_field": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "dimensions": schema.Int64Attribute{ + Required: true, + Description: `The number of dimensions the embedding model is generating`, + }, + "field_name": schema.StringAttribute{ + Required: true, + Description: `Name of the field in the record that contains the embedding`, + }, + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "from_field", + ), + }, + Description: `must be one of ["from_field"]`, + }, + }, + Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`, + }, + "destination_milvus_embedding_open_ai": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Required: true, + }, + }, + Description: `Use the OpenAI API to embed text. 
This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + "destination_milvus_update_embedding_cohere": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Required: true, + }, + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_milvus_update_embedding_fake": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_milvus_update_embedding_from_field": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "dimensions": schema.Int64Attribute{ + Required: true, + Description: `The number of dimensions the embedding model is generating`, + }, + "field_name": schema.StringAttribute{ + Required: true, + Description: `Name of the field in the record that contains the embedding`, + }, + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "from_field", + ), + }, + Description: `must be one of ["from_field"]`, + }, + }, + Description: `Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store.`, + }, + "destination_milvus_update_embedding_open_ai": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Required: true, + }, + }, + Description: `Use the OpenAI API to embed text. 
This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `Embedding configuration`, + }, + "indexing": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "auth": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "destination_milvus_indexing_authentication_api_token": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "token", + ), + }, + Description: `must be one of ["token"]`, + }, + "token": schema.StringAttribute{ + Required: true, + Description: `API Token for the Milvus instance`, + }, + }, + Description: `Authenticate using an API token (suitable for Zilliz Cloud)`, + }, + "destination_milvus_indexing_authentication_no_auth": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "no_auth", + ), + }, + Description: `must be one of ["no_auth"]`, + }, + }, + Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`, + }, + "destination_milvus_indexing_authentication_username_password": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "username_password", + ), + }, + Description: `must be one of ["username_password"]`, + }, + "password": schema.StringAttribute{ + Required: true, + Description: `Password for the Milvus instance`, + }, + "username": schema.StringAttribute{ + Required: true, + Description: `Username for the Milvus instance`, + }, + }, + Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`, + }, + "destination_milvus_update_indexing_authentication_api_token": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "token", + ), + }, + Description: `must be one of ["token"]`, + }, + "token": schema.StringAttribute{ + Required: true, + Description: `API Token for the Milvus instance`, + }, + }, + Description: `Authenticate using an API token (suitable for Zilliz Cloud)`, + }, + "destination_milvus_update_indexing_authentication_no_auth": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "no_auth", + ), + }, + Description: `must be one of ["no_auth"]`, + }, + }, + Description: `Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses)`, + }, + "destination_milvus_update_indexing_authentication_username_password": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "username_password", + ), + }, + Description: `must be one of ["username_password"]`, + }, + "password": schema.StringAttribute{ + Required: 
true, + Description: `Password for the Milvus instance`, + }, + "username": schema.StringAttribute{ + Required: true, + Description: `Username for the Milvus instance`, + }, + }, + Description: `Authenticate using username and password (suitable for self-managed Milvus clusters)`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `Authentication method`, + }, + "collection": schema.StringAttribute{ + Required: true, + Description: `The collection to load data into`, + }, + "db": schema.StringAttribute{ + Optional: true, + Description: `The database to connect to`, + }, + "host": schema.StringAttribute{ + Required: true, + Description: `The public endpoint of the Milvus instance. `, + }, + "text_field": schema.StringAttribute{ + Optional: true, + Description: `The field in the entity that contains the embedded text`, + }, + "vector_field": schema.StringAttribute{ + Optional: true, + Description: `The field in the entity that contains the vector`, + }, + }, + Description: `Indexing configuration`, + }, + "processing": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "chunk_overlap": schema.Int64Attribute{ + Optional: true, + Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`, + }, + "chunk_size": schema.Int64Attribute{ + Required: true, + Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`, + }, + "metadata_fields": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`, + }, + "text_fields": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. 
` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`, + }, + }, + }, + }, + }, + "destination_id": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + }, + "destination_type": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + }, + "name": schema.StringAttribute{ + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + Required: true, + }, + "workspace_id": schema.StringAttribute{ + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + Required: true, + }, + }, + } +} + +func (r *DestinationMilvusResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*sdk.SDK) + + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + + return + } + + r.client = client +} + +func (r *DestinationMilvusResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data *DestinationMilvusResourceModel + var item types.Object + + resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + + if resp.Diagnostics.HasError() { + return + } + + request := *data.ToCreateSDKType() + res, err := r.client.Destinations.CreateDestinationMilvus(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if res.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + if res.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) + return + } + data.RefreshFromCreateResponse(res.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *DestinationMilvusResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data *DestinationMilvusResourceModel + var item types.Object + + resp.Diagnostics.Append(req.State.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) 
+ + if resp.Diagnostics.HasError() { + return + } + + destinationID := data.DestinationID.ValueString() + request := operations.GetDestinationMilvusRequest{ + DestinationID: destinationID, + } + res, err := r.client.Destinations.GetDestinationMilvus(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if res.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + if res.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) + return + } + data.RefreshFromGetResponse(res.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *DestinationMilvusResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data *DestinationMilvusResourceModel + merge(ctx, req, resp, &data) + if resp.Diagnostics.HasError() { + return + } + + destinationMilvusPutRequest := data.ToUpdateSDKType() + destinationID := data.DestinationID.ValueString() + request := operations.PutDestinationMilvusRequest{ + DestinationMilvusPutRequest: destinationMilvusPutRequest, + DestinationID: destinationID, + } + res, err := r.client.Destinations.PutDestinationMilvus(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + destinationId1 := data.DestinationID.ValueString() + getRequest := operations.GetDestinationMilvusRequest{ + DestinationID: destinationId1, + } + getResponse, err := r.client.Destinations.GetDestinationMilvus(ctx, getRequest) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if getResponse == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse)) + return + } + if getResponse.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse)) + return + } + if getResponse.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse)) + return + } + data.RefreshFromGetResponse(getResponse.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} + +func (r *DestinationMilvusResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data *DestinationMilvusResourceModel + var item types.Object + + resp.Diagnostics.Append(req.State.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + + if resp.Diagnostics.HasError() { + return + } + + destinationID := data.DestinationID.ValueString() + request := operations.DeleteDestinationMilvusRequest{ + DestinationID: destinationID, + } + res, err := r.client.Destinations.DeleteDestinationMilvus(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + +} + +func (r *DestinationMilvusResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp) +} diff --git a/internal/provider/destination_milvus_resource_sdk.go b/internal/provider/destination_milvus_resource_sdk.go new file mode 100755 index 000000000..6089fa5e2 --- /dev/null +++ b/internal/provider/destination_milvus_resource_sdk.go @@ -0,0 +1,431 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
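Throughout the resource schema above, every oneOf branch of the connector spec (the embedding providers, and the indexing authentication methods together with their *_update_* counterparts) is modelled as its own optional nested attribute, and validators.ExactlyOneChild() is attached to the parent so that a plan setting none, or more than one, of the branches is rejected. The sketch below illustrates that invariant in plain Go, under the assumption that each branch is a nillable pointer; it is only an illustration of the convention, not the provider's actual validator implementation.

package main

import "fmt"

// One small struct per oneOf branch, mirroring how the generated schema
// models the indexing authentication options.
type apiToken struct{ Token string }
type usernamePassword struct{ Username, Password string }
type noAuth struct{}

// authUnion mirrors the generated pattern: one optional pointer field per
// branch, of which exactly one may be set.
type authUnion struct {
    APIToken         *apiToken
    UsernamePassword *usernamePassword
    NoAuth           *noAuth
}

// exactlyOneChild reports whether exactly one branch is set, which is the
// invariant validators.ExactlyOneChild() enforces at plan time.
func exactlyOneChild(a authUnion) bool {
    set := 0
    if a.APIToken != nil {
        set++
    }
    if a.UsernamePassword != nil {
        set++
    }
    if a.NoAuth != nil {
        set++
    }
    return set == 1
}

func main() {
    valid := authUnion{APIToken: &apiToken{Token: "xyz"}}
    ambiguous := authUnion{APIToken: &apiToken{Token: "xyz"}, NoAuth: &noAuth{}}
    fmt.Println(exactlyOneChild(valid))     // true
    fmt.Println(exactlyOneChild(ambiguous)) // false
}

The same invariant is what lets the SDK-mapping file that follows simply copy whichever branch pointer is non-nil into the shared union type.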
+ +package provider + +import ( + "airbyte/internal/sdk/pkg/models/shared" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *DestinationMilvusResourceModel) ToCreateSDKType() *shared.DestinationMilvusCreateRequest { + destinationType := shared.DestinationMilvusMilvus(r.Configuration.DestinationType.ValueString()) + var embedding shared.DestinationMilvusEmbedding + var destinationMilvusEmbeddingOpenAI *shared.DestinationMilvusEmbeddingOpenAI + if r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI != nil { + mode := new(shared.DestinationMilvusEmbeddingOpenAIMode) + if !r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.Mode.IsNull() { + *mode = shared.DestinationMilvusEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.Mode.ValueString()) + } else { + mode = nil + } + openaiKey := r.Configuration.Embedding.DestinationMilvusEmbeddingOpenAI.OpenaiKey.ValueString() + destinationMilvusEmbeddingOpenAI = &shared.DestinationMilvusEmbeddingOpenAI{ + Mode: mode, + OpenaiKey: openaiKey, + } + } + if destinationMilvusEmbeddingOpenAI != nil { + embedding = shared.DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingOpenAI: destinationMilvusEmbeddingOpenAI, + } + } + var destinationMilvusEmbeddingCohere *shared.DestinationMilvusEmbeddingCohere + if r.Configuration.Embedding.DestinationMilvusEmbeddingCohere != nil { + cohereKey := r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.CohereKey.ValueString() + mode1 := new(shared.DestinationMilvusEmbeddingCohereMode) + if !r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.Mode.IsNull() { + *mode1 = shared.DestinationMilvusEmbeddingCohereMode(r.Configuration.Embedding.DestinationMilvusEmbeddingCohere.Mode.ValueString()) + } else { + mode1 = nil + } + destinationMilvusEmbeddingCohere = &shared.DestinationMilvusEmbeddingCohere{ + CohereKey: cohereKey, + Mode: mode1, + } + } + if destinationMilvusEmbeddingCohere != nil { + embedding = shared.DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingCohere: destinationMilvusEmbeddingCohere, + } + } + var destinationMilvusEmbeddingFake *shared.DestinationMilvusEmbeddingFake + if r.Configuration.Embedding.DestinationMilvusEmbeddingFake != nil { + mode2 := new(shared.DestinationMilvusEmbeddingFakeMode) + if !r.Configuration.Embedding.DestinationMilvusEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusEmbeddingFake.Mode.IsNull() { + *mode2 = shared.DestinationMilvusEmbeddingFakeMode(r.Configuration.Embedding.DestinationMilvusEmbeddingFake.Mode.ValueString()) + } else { + mode2 = nil + } + destinationMilvusEmbeddingFake = &shared.DestinationMilvusEmbeddingFake{ + Mode: mode2, + } + } + if destinationMilvusEmbeddingFake != nil { + embedding = shared.DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingFake: destinationMilvusEmbeddingFake, + } + } + var destinationMilvusEmbeddingFromField *shared.DestinationMilvusEmbeddingFromField + if r.Configuration.Embedding.DestinationMilvusEmbeddingFromField != nil { + dimensions := r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Dimensions.ValueInt64() + fieldName := r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.FieldName.ValueString() + mode3 := new(shared.DestinationMilvusEmbeddingFromFieldMode) + if !r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Mode.IsUnknown() 
&& !r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Mode.IsNull() { + *mode3 = shared.DestinationMilvusEmbeddingFromFieldMode(r.Configuration.Embedding.DestinationMilvusEmbeddingFromField.Mode.ValueString()) + } else { + mode3 = nil + } + destinationMilvusEmbeddingFromField = &shared.DestinationMilvusEmbeddingFromField{ + Dimensions: dimensions, + FieldName: fieldName, + Mode: mode3, + } + } + if destinationMilvusEmbeddingFromField != nil { + embedding = shared.DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingFromField: destinationMilvusEmbeddingFromField, + } + } + var auth shared.DestinationMilvusIndexingAuthentication + var destinationMilvusIndexingAuthenticationAPIToken *shared.DestinationMilvusIndexingAuthenticationAPIToken + if r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken != nil { + mode4 := new(shared.DestinationMilvusIndexingAuthenticationAPITokenMode) + if !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Mode.IsNull() { + *mode4 = shared.DestinationMilvusIndexingAuthenticationAPITokenMode(r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Mode.ValueString()) + } else { + mode4 = nil + } + token := r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationAPIToken.Token.ValueString() + destinationMilvusIndexingAuthenticationAPIToken = &shared.DestinationMilvusIndexingAuthenticationAPIToken{ + Mode: mode4, + Token: token, + } + } + if destinationMilvusIndexingAuthenticationAPIToken != nil { + auth = shared.DestinationMilvusIndexingAuthentication{ + DestinationMilvusIndexingAuthenticationAPIToken: destinationMilvusIndexingAuthenticationAPIToken, + } + } + var destinationMilvusIndexingAuthenticationUsernamePassword *shared.DestinationMilvusIndexingAuthenticationUsernamePassword + if r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword != nil { + mode5 := new(shared.DestinationMilvusIndexingAuthenticationUsernamePasswordMode) + if !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Mode.IsNull() { + *mode5 = shared.DestinationMilvusIndexingAuthenticationUsernamePasswordMode(r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Mode.ValueString()) + } else { + mode5 = nil + } + password := r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Password.ValueString() + username := r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationUsernamePassword.Username.ValueString() + destinationMilvusIndexingAuthenticationUsernamePassword = &shared.DestinationMilvusIndexingAuthenticationUsernamePassword{ + Mode: mode5, + Password: password, + Username: username, + } + } + if destinationMilvusIndexingAuthenticationUsernamePassword != nil { + auth = shared.DestinationMilvusIndexingAuthentication{ + DestinationMilvusIndexingAuthenticationUsernamePassword: destinationMilvusIndexingAuthenticationUsernamePassword, + } + } + var destinationMilvusIndexingAuthenticationNoAuth *shared.DestinationMilvusIndexingAuthenticationNoAuth + if r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth != nil { + mode6 := new(shared.DestinationMilvusIndexingAuthenticationNoAuthMode) + if 
!r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth.Mode.IsNull() { + *mode6 = shared.DestinationMilvusIndexingAuthenticationNoAuthMode(r.Configuration.Indexing.Auth.DestinationMilvusIndexingAuthenticationNoAuth.Mode.ValueString()) + } else { + mode6 = nil + } + destinationMilvusIndexingAuthenticationNoAuth = &shared.DestinationMilvusIndexingAuthenticationNoAuth{ + Mode: mode6, + } + } + if destinationMilvusIndexingAuthenticationNoAuth != nil { + auth = shared.DestinationMilvusIndexingAuthentication{ + DestinationMilvusIndexingAuthenticationNoAuth: destinationMilvusIndexingAuthenticationNoAuth, + } + } + collection := r.Configuration.Indexing.Collection.ValueString() + db := new(string) + if !r.Configuration.Indexing.Db.IsUnknown() && !r.Configuration.Indexing.Db.IsNull() { + *db = r.Configuration.Indexing.Db.ValueString() + } else { + db = nil + } + host := r.Configuration.Indexing.Host.ValueString() + textField := new(string) + if !r.Configuration.Indexing.TextField.IsUnknown() && !r.Configuration.Indexing.TextField.IsNull() { + *textField = r.Configuration.Indexing.TextField.ValueString() + } else { + textField = nil + } + vectorField := new(string) + if !r.Configuration.Indexing.VectorField.IsUnknown() && !r.Configuration.Indexing.VectorField.IsNull() { + *vectorField = r.Configuration.Indexing.VectorField.ValueString() + } else { + vectorField = nil + } + indexing := shared.DestinationMilvusIndexing{ + Auth: auth, + Collection: collection, + Db: db, + Host: host, + TextField: textField, + VectorField: vectorField, + } + chunkOverlap := new(int64) + if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() { + *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64() + } else { + chunkOverlap = nil + } + chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64() + var metadataFields []string = nil + for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields { + metadataFields = append(metadataFields, metadataFieldsItem.ValueString()) + } + var textFields []string = nil + for _, textFieldsItem := range r.Configuration.Processing.TextFields { + textFields = append(textFields, textFieldsItem.ValueString()) + } + processing := shared.DestinationMilvusProcessingConfigModel{ + ChunkOverlap: chunkOverlap, + ChunkSize: chunkSize, + MetadataFields: metadataFields, + TextFields: textFields, + } + configuration := shared.DestinationMilvus{ + DestinationType: destinationType, + Embedding: embedding, + Indexing: indexing, + Processing: processing, + } + name := r.Name.ValueString() + workspaceID := r.WorkspaceID.ValueString() + out := shared.DestinationMilvusCreateRequest{ + Configuration: configuration, + Name: name, + WorkspaceID: workspaceID, + } + return &out +} + +func (r *DestinationMilvusResourceModel) ToGetSDKType() *shared.DestinationMilvusCreateRequest { + out := r.ToCreateSDKType() + return out +} + +func (r *DestinationMilvusResourceModel) ToUpdateSDKType() *shared.DestinationMilvusPutRequest { + var embedding shared.DestinationMilvusUpdateEmbedding + var destinationMilvusUpdateEmbeddingOpenAI *shared.DestinationMilvusUpdateEmbeddingOpenAI + if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI != nil { + mode := new(shared.DestinationMilvusUpdateEmbeddingOpenAIMode) + if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.Mode.IsUnknown() && 
!r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.Mode.IsNull() { + *mode = shared.DestinationMilvusUpdateEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.Mode.ValueString()) + } else { + mode = nil + } + openaiKey := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingOpenAI.OpenaiKey.ValueString() + destinationMilvusUpdateEmbeddingOpenAI = &shared.DestinationMilvusUpdateEmbeddingOpenAI{ + Mode: mode, + OpenaiKey: openaiKey, + } + } + if destinationMilvusUpdateEmbeddingOpenAI != nil { + embedding = shared.DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingOpenAI: destinationMilvusUpdateEmbeddingOpenAI, + } + } + var destinationMilvusUpdateEmbeddingCohere *shared.DestinationMilvusUpdateEmbeddingCohere + if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere != nil { + cohereKey := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.CohereKey.ValueString() + mode1 := new(shared.DestinationMilvusUpdateEmbeddingCohereMode) + if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.Mode.IsNull() { + *mode1 = shared.DestinationMilvusUpdateEmbeddingCohereMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingCohere.Mode.ValueString()) + } else { + mode1 = nil + } + destinationMilvusUpdateEmbeddingCohere = &shared.DestinationMilvusUpdateEmbeddingCohere{ + CohereKey: cohereKey, + Mode: mode1, + } + } + if destinationMilvusUpdateEmbeddingCohere != nil { + embedding = shared.DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingCohere: destinationMilvusUpdateEmbeddingCohere, + } + } + var destinationMilvusUpdateEmbeddingFake *shared.DestinationMilvusUpdateEmbeddingFake + if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake != nil { + mode2 := new(shared.DestinationMilvusUpdateEmbeddingFakeMode) + if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake.Mode.IsNull() { + *mode2 = shared.DestinationMilvusUpdateEmbeddingFakeMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFake.Mode.ValueString()) + } else { + mode2 = nil + } + destinationMilvusUpdateEmbeddingFake = &shared.DestinationMilvusUpdateEmbeddingFake{ + Mode: mode2, + } + } + if destinationMilvusUpdateEmbeddingFake != nil { + embedding = shared.DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingFake: destinationMilvusUpdateEmbeddingFake, + } + } + var destinationMilvusUpdateEmbeddingFromField *shared.DestinationMilvusUpdateEmbeddingFromField + if r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField != nil { + dimensions := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Dimensions.ValueInt64() + fieldName := r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.FieldName.ValueString() + mode3 := new(shared.DestinationMilvusUpdateEmbeddingFromFieldMode) + if !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Mode.IsNull() { + *mode3 = shared.DestinationMilvusUpdateEmbeddingFromFieldMode(r.Configuration.Embedding.DestinationMilvusUpdateEmbeddingFromField.Mode.ValueString()) + } else { + mode3 = nil + } + destinationMilvusUpdateEmbeddingFromField = &shared.DestinationMilvusUpdateEmbeddingFromField{ + Dimensions: dimensions, + FieldName: 
fieldName, + Mode: mode3, + } + } + if destinationMilvusUpdateEmbeddingFromField != nil { + embedding = shared.DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingFromField: destinationMilvusUpdateEmbeddingFromField, + } + } + var auth shared.DestinationMilvusUpdateIndexingAuthentication + var destinationMilvusUpdateIndexingAuthenticationAPIToken *shared.DestinationMilvusUpdateIndexingAuthenticationAPIToken + if r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken != nil { + mode4 := new(shared.DestinationMilvusUpdateIndexingAuthenticationAPITokenMode) + if !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Mode.IsNull() { + *mode4 = shared.DestinationMilvusUpdateIndexingAuthenticationAPITokenMode(r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Mode.ValueString()) + } else { + mode4 = nil + } + token := r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationAPIToken.Token.ValueString() + destinationMilvusUpdateIndexingAuthenticationAPIToken = &shared.DestinationMilvusUpdateIndexingAuthenticationAPIToken{ + Mode: mode4, + Token: token, + } + } + if destinationMilvusUpdateIndexingAuthenticationAPIToken != nil { + auth = shared.DestinationMilvusUpdateIndexingAuthentication{ + DestinationMilvusUpdateIndexingAuthenticationAPIToken: destinationMilvusUpdateIndexingAuthenticationAPIToken, + } + } + var destinationMilvusUpdateIndexingAuthenticationUsernamePassword *shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword + if r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword != nil { + mode5 := new(shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode) + if !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Mode.IsUnknown() && !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Mode.IsNull() { + *mode5 = shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode(r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Mode.ValueString()) + } else { + mode5 = nil + } + password := r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Password.ValueString() + username := r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword.Username.ValueString() + destinationMilvusUpdateIndexingAuthenticationUsernamePassword = &shared.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword{ + Mode: mode5, + Password: password, + Username: username, + } + } + if destinationMilvusUpdateIndexingAuthenticationUsernamePassword != nil { + auth = shared.DestinationMilvusUpdateIndexingAuthentication{ + DestinationMilvusUpdateIndexingAuthenticationUsernamePassword: destinationMilvusUpdateIndexingAuthenticationUsernamePassword, + } + } + var destinationMilvusUpdateIndexingAuthenticationNoAuth *shared.DestinationMilvusUpdateIndexingAuthenticationNoAuth + if r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth != nil { + mode6 := new(shared.DestinationMilvusUpdateIndexingAuthenticationNoAuthMode) + if !r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth.Mode.IsUnknown() && 
!r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth.Mode.IsNull() { + *mode6 = shared.DestinationMilvusUpdateIndexingAuthenticationNoAuthMode(r.Configuration.Indexing.Auth.DestinationMilvusUpdateIndexingAuthenticationNoAuth.Mode.ValueString()) + } else { + mode6 = nil + } + destinationMilvusUpdateIndexingAuthenticationNoAuth = &shared.DestinationMilvusUpdateIndexingAuthenticationNoAuth{ + Mode: mode6, + } + } + if destinationMilvusUpdateIndexingAuthenticationNoAuth != nil { + auth = shared.DestinationMilvusUpdateIndexingAuthentication{ + DestinationMilvusUpdateIndexingAuthenticationNoAuth: destinationMilvusUpdateIndexingAuthenticationNoAuth, + } + } + collection := r.Configuration.Indexing.Collection.ValueString() + db := new(string) + if !r.Configuration.Indexing.Db.IsUnknown() && !r.Configuration.Indexing.Db.IsNull() { + *db = r.Configuration.Indexing.Db.ValueString() + } else { + db = nil + } + host := r.Configuration.Indexing.Host.ValueString() + textField := new(string) + if !r.Configuration.Indexing.TextField.IsUnknown() && !r.Configuration.Indexing.TextField.IsNull() { + *textField = r.Configuration.Indexing.TextField.ValueString() + } else { + textField = nil + } + vectorField := new(string) + if !r.Configuration.Indexing.VectorField.IsUnknown() && !r.Configuration.Indexing.VectorField.IsNull() { + *vectorField = r.Configuration.Indexing.VectorField.ValueString() + } else { + vectorField = nil + } + indexing := shared.DestinationMilvusUpdateIndexing{ + Auth: auth, + Collection: collection, + Db: db, + Host: host, + TextField: textField, + VectorField: vectorField, + } + chunkOverlap := new(int64) + if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() { + *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64() + } else { + chunkOverlap = nil + } + chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64() + var metadataFields []string = nil + for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields { + metadataFields = append(metadataFields, metadataFieldsItem.ValueString()) + } + var textFields []string = nil + for _, textFieldsItem := range r.Configuration.Processing.TextFields { + textFields = append(textFields, textFieldsItem.ValueString()) + } + processing := shared.DestinationMilvusUpdateProcessingConfigModel{ + ChunkOverlap: chunkOverlap, + ChunkSize: chunkSize, + MetadataFields: metadataFields, + TextFields: textFields, + } + configuration := shared.DestinationMilvusUpdate{ + Embedding: embedding, + Indexing: indexing, + Processing: processing, + } + name := r.Name.ValueString() + workspaceID := r.WorkspaceID.ValueString() + out := shared.DestinationMilvusPutRequest{ + Configuration: configuration, + Name: name, + WorkspaceID: workspaceID, + } + return &out +} + +func (r *DestinationMilvusResourceModel) ToDeleteSDKType() *shared.DestinationMilvusCreateRequest { + out := r.ToCreateSDKType() + return out +} + +func (r *DestinationMilvusResourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) { + r.DestinationID = types.StringValue(resp.DestinationID) + r.DestinationType = types.StringValue(resp.DestinationType) + r.Name = types.StringValue(resp.Name) + r.WorkspaceID = types.StringValue(resp.WorkspaceID) +} + +func (r *DestinationMilvusResourceModel) RefreshFromCreateResponse(resp *shared.DestinationResponse) { + r.RefreshFromGetResponse(resp) +} diff --git a/internal/provider/destination_pinecone_data_source.go 
b/internal/provider/destination_pinecone_data_source.go new file mode 100755 index 000000000..fc6ff453c --- /dev/null +++ b/internal/provider/destination_pinecone_data_source.go @@ -0,0 +1,296 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import ( + "airbyte/internal/sdk" + "airbyte/internal/sdk/pkg/models/operations" + "context" + "fmt" + + "airbyte/internal/validators" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/datasource" + "github.com/hashicorp/terraform-plugin-framework/datasource/schema" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// Ensure provider defined types fully satisfy framework interfaces. +var _ datasource.DataSource = &DestinationPineconeDataSource{} +var _ datasource.DataSourceWithConfigure = &DestinationPineconeDataSource{} + +func NewDestinationPineconeDataSource() datasource.DataSource { + return &DestinationPineconeDataSource{} +} + +// DestinationPineconeDataSource is the data source implementation. +type DestinationPineconeDataSource struct { + client *sdk.SDK +} + +// DestinationPineconeDataSourceModel describes the data model. +type DestinationPineconeDataSourceModel struct { + Configuration DestinationPinecone `tfsdk:"configuration"` + DestinationID types.String `tfsdk:"destination_id"` + Name types.String `tfsdk:"name"` + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +// Metadata returns the data source type name. +func (r *DestinationPineconeDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_destination_pinecone" +} + +// Schema defines the schema for the data source. +func (r *DestinationPineconeDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "DestinationPinecone DataSource", + + Attributes: map[string]schema.Attribute{ + "configuration": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "destination_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "pinecone", + ), + }, + Description: `must be one of ["pinecone"]`, + }, + "embedding": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "destination_pinecone_embedding_cohere": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Computed: true, + }, + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_pinecone_embedding_fake": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. 
This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_pinecone_embedding_open_ai": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Computed: true, + }, + }, + Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + "destination_pinecone_update_embedding_cohere": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Computed: true, + }, + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_pinecone_update_embedding_fake": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_pinecone_update_embedding_open_ai": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Computed: true, + }, + }, + Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `Embedding configuration`, + }, + "indexing": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "index": schema.StringAttribute{ + Computed: true, + Description: `Pinecone index to use`, + }, + "pinecone_environment": schema.StringAttribute{ + Computed: true, + Description: `Pinecone environment to use`, + }, + "pinecone_key": schema.StringAttribute{ + Computed: true, + }, + }, + Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings.`, + }, + "processing": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "chunk_overlap": schema.Int64Attribute{ + Computed: true, + Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`, + }, + "chunk_size": schema.Int64Attribute{ + Computed: true, + Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`, + }, + "metadata_fields": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. 
When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`, + }, + "text_fields": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`, + }, + }, + }, + }, + }, + "destination_id": schema.StringAttribute{ + Required: true, + }, + "name": schema.StringAttribute{ + Computed: true, + }, + "workspace_id": schema.StringAttribute{ + Computed: true, + }, + }, + } +} + +func (r *DestinationPineconeDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*sdk.SDK) + + if !ok { + resp.Diagnostics.AddError( + "Unexpected DataSource Configure Type", + fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + + return + } + + r.client = client +} + +func (r *DestinationPineconeDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { + var data *DestinationPineconeDataSourceModel + var item types.Object + + resp.Diagnostics.Append(req.Config.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + + if resp.Diagnostics.HasError() { + return + } + + destinationID := data.DestinationID.ValueString() + request := operations.GetDestinationPineconeRequest{ + DestinationID: destinationID, + } + res, err := r.client.Destinations.GetDestinationPinecone(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if res.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + if res.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. 
No response body", debugResponse(res.RawResponse)) + return + } + data.RefreshFromGetResponse(res.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} diff --git a/internal/provider/source_openweather_data_source_sdk.go b/internal/provider/destination_pinecone_data_source_sdk.go similarity index 64% rename from internal/provider/source_openweather_data_source_sdk.go rename to internal/provider/destination_pinecone_data_source_sdk.go index 8ebcca043..a382d69e4 100755 --- a/internal/provider/source_openweather_data_source_sdk.go +++ b/internal/provider/destination_pinecone_data_source_sdk.go @@ -7,8 +7,8 @@ import ( "github.com/hashicorp/terraform-plugin-framework/types" ) -func (r *SourceOpenweatherDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) { +func (r *DestinationPineconeDataSourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) { + r.DestinationID = types.StringValue(resp.DestinationID) r.Name = types.StringValue(resp.Name) - r.SourceID = types.StringValue(resp.SourceID) r.WorkspaceID = types.StringValue(resp.WorkspaceID) } diff --git a/internal/provider/destination_pinecone_resource.go b/internal/provider/destination_pinecone_resource.go new file mode 100755 index 000000000..e64f09126 --- /dev/null +++ b/internal/provider/destination_pinecone_resource.go @@ -0,0 +1,462 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import ( + "airbyte/internal/sdk" + "context" + "fmt" + + speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier" + "airbyte/internal/sdk/pkg/models/operations" + "airbyte/internal/validators" + "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" + "github.com/hashicorp/terraform-plugin-framework/path" + "github.com/hashicorp/terraform-plugin-framework/resource" + "github.com/hashicorp/terraform-plugin-framework/resource/schema" + "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" + "github.com/hashicorp/terraform-plugin-framework/schema/validator" + "github.com/hashicorp/terraform-plugin-framework/types" + "github.com/hashicorp/terraform-plugin-framework/types/basetypes" +) + +// Ensure provider defined types fully satisfy framework interfaces. +var _ resource.Resource = &DestinationPineconeResource{} +var _ resource.ResourceWithImportState = &DestinationPineconeResource{} + +func NewDestinationPineconeResource() resource.Resource { + return &DestinationPineconeResource{} +} + +// DestinationPineconeResource defines the resource implementation. +type DestinationPineconeResource struct { + client *sdk.SDK +} + +// DestinationPineconeResourceModel describes the resource data model. 
+type DestinationPineconeResourceModel struct { + Configuration DestinationPinecone `tfsdk:"configuration"` + DestinationID types.String `tfsdk:"destination_id"` + DestinationType types.String `tfsdk:"destination_type"` + Name types.String `tfsdk:"name"` + WorkspaceID types.String `tfsdk:"workspace_id"` +} + +func (r *DestinationPineconeResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { + resp.TypeName = req.ProviderTypeName + "_destination_pinecone" +} + +func (r *DestinationPineconeResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { + resp.Schema = schema.Schema{ + MarkdownDescription: "DestinationPinecone Resource", + + Attributes: map[string]schema.Attribute{ + "configuration": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "destination_type": schema.StringAttribute{ + Required: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "pinecone", + ), + }, + Description: `must be one of ["pinecone"]`, + }, + "embedding": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "destination_pinecone_embedding_cohere": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Required: true, + }, + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_pinecone_embedding_fake": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_pinecone_embedding_open_ai": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Required: true, + }, + }, + Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + "destination_pinecone_update_embedding_cohere": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "cohere_key": schema.StringAttribute{ + Required: true, + }, + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "cohere", + ), + }, + Description: `must be one of ["cohere"]`, + }, + }, + Description: `Use the Cohere API to embed text.`, + }, + "destination_pinecone_update_embedding_fake": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "fake", + ), + }, + Description: `must be one of ["fake"]`, + }, + }, + Description: `Use a fake embedding made out of random vectors with 1536 embedding dimensions. 
This is useful for testing the data pipeline without incurring any costs.`, + }, + "destination_pinecone_update_embedding_open_ai": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "mode": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "openai", + ), + }, + Description: `must be one of ["openai"]`, + }, + "openai_key": schema.StringAttribute{ + Required: true, + }, + }, + Description: `Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `Embedding configuration`, + }, + "indexing": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "index": schema.StringAttribute{ + Required: true, + Description: `Pinecone index to use`, + }, + "pinecone_environment": schema.StringAttribute{ + Required: true, + Description: `Pinecone environment to use`, + }, + "pinecone_key": schema.StringAttribute{ + Required: true, + }, + }, + Description: `Pinecone is a popular vector store that can be used to store and retrieve embeddings.`, + }, + "processing": schema.SingleNestedAttribute{ + Required: true, + Attributes: map[string]schema.Attribute{ + "chunk_overlap": schema.Int64Attribute{ + Optional: true, + Description: `Size of overlap between chunks in tokens to store in vector store to better capture relevant context`, + }, + "chunk_size": schema.Int64Attribute{ + Required: true, + Description: `Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM)`, + }, + "metadata_fields": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. ` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path.`, + }, + "text_fields": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. ` + "`" + `user.name` + "`" + ` will access the ` + "`" + `name` + "`" + ` field in the ` + "`" + `user` + "`" + ` object. It's also possible to use wildcards to access all fields in an object, e.g. 
` + "`" + `users.*.name` + "`" + ` will access all ` + "`" + `names` + "`" + ` fields in all entries of the ` + "`" + `users` + "`" + ` array.`, + }, + }, + }, + }, + }, + "destination_id": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + }, + "destination_type": schema.StringAttribute{ + Computed: true, + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + }, + "name": schema.StringAttribute{ + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + Required: true, + }, + "workspace_id": schema.StringAttribute{ + PlanModifiers: []planmodifier.String{ + speakeasy_stringplanmodifier.SuppressDiff(), + }, + Required: true, + }, + }, + } +} + +func (r *DestinationPineconeResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { + // Prevent panic if the provider has not been configured. + if req.ProviderData == nil { + return + } + + client, ok := req.ProviderData.(*sdk.SDK) + + if !ok { + resp.Diagnostics.AddError( + "Unexpected Resource Configure Type", + fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), + ) + + return + } + + r.client = client +} + +func (r *DestinationPineconeResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { + var data *DestinationPineconeResourceModel + var item types.Object + + resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + + if resp.Diagnostics.HasError() { + return + } + + request := *data.ToCreateSDKType() + res, err := r.client.Destinations.CreateDestinationPinecone(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if res.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + if res.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) + return + } + data.RefreshFromCreateResponse(res.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *DestinationPineconeResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { + var data *DestinationPineconeResourceModel + var item types.Object + + resp.Diagnostics.Append(req.State.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) 
+ + if resp.Diagnostics.HasError() { + return + } + + destinationID := data.DestinationID.ValueString() + request := operations.GetDestinationPineconeRequest{ + DestinationID: destinationID, + } + res, err := r.client.Destinations.GetDestinationPinecone(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if res.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + if res.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) + return + } + data.RefreshFromGetResponse(res.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) +} + +func (r *DestinationPineconeResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { + var data *DestinationPineconeResourceModel + merge(ctx, req, resp, &data) + if resp.Diagnostics.HasError() { + return + } + + destinationPineconePutRequest := data.ToUpdateSDKType() + destinationID := data.DestinationID.ValueString() + request := operations.PutDestinationPineconeRequest{ + DestinationPineconePutRequest: destinationPineconePutRequest, + DestinationID: destinationID, + } + res, err := r.client.Destinations.PutDestinationPinecone(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + destinationId1 := data.DestinationID.ValueString() + getRequest := operations.GetDestinationPineconeRequest{ + DestinationID: destinationId1, + } + getResponse, err := r.client.Destinations.GetDestinationPinecone(ctx, getRequest) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if getResponse == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse)) + return + } + if getResponse.StatusCode != 200 { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse)) + return + } + if getResponse.DestinationResponse == nil { + resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse)) + return + } + data.RefreshFromGetResponse(getResponse.DestinationResponse) + + // Save updated data into Terraform state + resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
+} + +func (r *DestinationPineconeResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { + var data *DestinationPineconeResourceModel + var item types.Object + + resp.Diagnostics.Append(req.State.Get(ctx, &item)...) + if resp.Diagnostics.HasError() { + return + } + + resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ + UnhandledNullAsEmpty: true, + UnhandledUnknownAsEmpty: true, + })...) + + if resp.Diagnostics.HasError() { + return + } + + destinationID := data.DestinationID.ValueString() + request := operations.DeleteDestinationPineconeRequest{ + DestinationID: destinationID, + } + res, err := r.client.Destinations.DeleteDestinationPinecone(ctx, request) + if err != nil { + resp.Diagnostics.AddError("failure to invoke API", err.Error()) + if res != nil && res.RawResponse != nil { + resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) + } + return + } + if res == nil { + resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) + return + } + if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { + resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) + return + } + +} + +func (r *DestinationPineconeResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { + resource.ImportStatePassthroughID(ctx, path.Root("destination_id"), req, resp) +} diff --git a/internal/provider/destination_pinecone_resource_sdk.go b/internal/provider/destination_pinecone_resource_sdk.go new file mode 100755 index 000000000..5769344a7 --- /dev/null +++ b/internal/provider/destination_pinecone_resource_sdk.go @@ -0,0 +1,233 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import ( + "airbyte/internal/sdk/pkg/models/shared" + "github.com/hashicorp/terraform-plugin-framework/types" +) + +func (r *DestinationPineconeResourceModel) ToCreateSDKType() *shared.DestinationPineconeCreateRequest { + destinationType := shared.DestinationPineconePinecone(r.Configuration.DestinationType.ValueString()) + var embedding shared.DestinationPineconeEmbedding + var destinationPineconeEmbeddingOpenAI *shared.DestinationPineconeEmbeddingOpenAI + if r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI != nil { + mode := new(shared.DestinationPineconeEmbeddingOpenAIMode) + if !r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.Mode.IsNull() { + *mode = shared.DestinationPineconeEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.Mode.ValueString()) + } else { + mode = nil + } + openaiKey := r.Configuration.Embedding.DestinationPineconeEmbeddingOpenAI.OpenaiKey.ValueString() + destinationPineconeEmbeddingOpenAI = &shared.DestinationPineconeEmbeddingOpenAI{ + Mode: mode, + OpenaiKey: openaiKey, + } + } + if destinationPineconeEmbeddingOpenAI != nil { + embedding = shared.DestinationPineconeEmbedding{ + DestinationPineconeEmbeddingOpenAI: destinationPineconeEmbeddingOpenAI, + } + } + var destinationPineconeEmbeddingCohere *shared.DestinationPineconeEmbeddingCohere + if r.Configuration.Embedding.DestinationPineconeEmbeddingCohere != nil { + cohereKey := r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.CohereKey.ValueString() + mode1 := new(shared.DestinationPineconeEmbeddingCohereMode) + if !r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.Mode.IsNull() { + *mode1 = shared.DestinationPineconeEmbeddingCohereMode(r.Configuration.Embedding.DestinationPineconeEmbeddingCohere.Mode.ValueString()) + } else { + mode1 = nil + } + destinationPineconeEmbeddingCohere = &shared.DestinationPineconeEmbeddingCohere{ + CohereKey: cohereKey, + Mode: mode1, + } + } + if destinationPineconeEmbeddingCohere != nil { + embedding = shared.DestinationPineconeEmbedding{ + DestinationPineconeEmbeddingCohere: destinationPineconeEmbeddingCohere, + } + } + var destinationPineconeEmbeddingFake *shared.DestinationPineconeEmbeddingFake + if r.Configuration.Embedding.DestinationPineconeEmbeddingFake != nil { + mode2 := new(shared.DestinationPineconeEmbeddingFakeMode) + if !r.Configuration.Embedding.DestinationPineconeEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeEmbeddingFake.Mode.IsNull() { + *mode2 = shared.DestinationPineconeEmbeddingFakeMode(r.Configuration.Embedding.DestinationPineconeEmbeddingFake.Mode.ValueString()) + } else { + mode2 = nil + } + destinationPineconeEmbeddingFake = &shared.DestinationPineconeEmbeddingFake{ + Mode: mode2, + } + } + if destinationPineconeEmbeddingFake != nil { + embedding = shared.DestinationPineconeEmbedding{ + DestinationPineconeEmbeddingFake: destinationPineconeEmbeddingFake, + } + } + index := r.Configuration.Indexing.Index.ValueString() + pineconeEnvironment := r.Configuration.Indexing.PineconeEnvironment.ValueString() + pineconeKey := r.Configuration.Indexing.PineconeKey.ValueString() + indexing := shared.DestinationPineconeIndexing{ + Index: index, + PineconeEnvironment: pineconeEnvironment, + PineconeKey: pineconeKey, + } + chunkOverlap := new(int64) + if 
!r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() { + *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64() + } else { + chunkOverlap = nil + } + chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64() + var metadataFields []string = nil + for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields { + metadataFields = append(metadataFields, metadataFieldsItem.ValueString()) + } + var textFields []string = nil + for _, textFieldsItem := range r.Configuration.Processing.TextFields { + textFields = append(textFields, textFieldsItem.ValueString()) + } + processing := shared.DestinationPineconeProcessingConfigModel{ + ChunkOverlap: chunkOverlap, + ChunkSize: chunkSize, + MetadataFields: metadataFields, + TextFields: textFields, + } + configuration := shared.DestinationPinecone{ + DestinationType: destinationType, + Embedding: embedding, + Indexing: indexing, + Processing: processing, + } + name := r.Name.ValueString() + workspaceID := r.WorkspaceID.ValueString() + out := shared.DestinationPineconeCreateRequest{ + Configuration: configuration, + Name: name, + WorkspaceID: workspaceID, + } + return &out +} + +func (r *DestinationPineconeResourceModel) ToGetSDKType() *shared.DestinationPineconeCreateRequest { + out := r.ToCreateSDKType() + return out +} + +func (r *DestinationPineconeResourceModel) ToUpdateSDKType() *shared.DestinationPineconePutRequest { + var embedding shared.DestinationPineconeUpdateEmbedding + var destinationPineconeUpdateEmbeddingOpenAI *shared.DestinationPineconeUpdateEmbeddingOpenAI + if r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI != nil { + mode := new(shared.DestinationPineconeUpdateEmbeddingOpenAIMode) + if !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.Mode.IsNull() { + *mode = shared.DestinationPineconeUpdateEmbeddingOpenAIMode(r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.Mode.ValueString()) + } else { + mode = nil + } + openaiKey := r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingOpenAI.OpenaiKey.ValueString() + destinationPineconeUpdateEmbeddingOpenAI = &shared.DestinationPineconeUpdateEmbeddingOpenAI{ + Mode: mode, + OpenaiKey: openaiKey, + } + } + if destinationPineconeUpdateEmbeddingOpenAI != nil { + embedding = shared.DestinationPineconeUpdateEmbedding{ + DestinationPineconeUpdateEmbeddingOpenAI: destinationPineconeUpdateEmbeddingOpenAI, + } + } + var destinationPineconeUpdateEmbeddingCohere *shared.DestinationPineconeUpdateEmbeddingCohere + if r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere != nil { + cohereKey := r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.CohereKey.ValueString() + mode1 := new(shared.DestinationPineconeUpdateEmbeddingCohereMode) + if !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.Mode.IsNull() { + *mode1 = shared.DestinationPineconeUpdateEmbeddingCohereMode(r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingCohere.Mode.ValueString()) + } else { + mode1 = nil + } + destinationPineconeUpdateEmbeddingCohere = &shared.DestinationPineconeUpdateEmbeddingCohere{ + CohereKey: cohereKey, + Mode: mode1, + } + } + if destinationPineconeUpdateEmbeddingCohere != nil { + embedding = shared.DestinationPineconeUpdateEmbedding{ + 
DestinationPineconeUpdateEmbeddingCohere: destinationPineconeUpdateEmbeddingCohere, + } + } + var destinationPineconeUpdateEmbeddingFake *shared.DestinationPineconeUpdateEmbeddingFake + if r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake != nil { + mode2 := new(shared.DestinationPineconeUpdateEmbeddingFakeMode) + if !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake.Mode.IsUnknown() && !r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake.Mode.IsNull() { + *mode2 = shared.DestinationPineconeUpdateEmbeddingFakeMode(r.Configuration.Embedding.DestinationPineconeUpdateEmbeddingFake.Mode.ValueString()) + } else { + mode2 = nil + } + destinationPineconeUpdateEmbeddingFake = &shared.DestinationPineconeUpdateEmbeddingFake{ + Mode: mode2, + } + } + if destinationPineconeUpdateEmbeddingFake != nil { + embedding = shared.DestinationPineconeUpdateEmbedding{ + DestinationPineconeUpdateEmbeddingFake: destinationPineconeUpdateEmbeddingFake, + } + } + index := r.Configuration.Indexing.Index.ValueString() + pineconeEnvironment := r.Configuration.Indexing.PineconeEnvironment.ValueString() + pineconeKey := r.Configuration.Indexing.PineconeKey.ValueString() + indexing := shared.DestinationPineconeUpdateIndexing{ + Index: index, + PineconeEnvironment: pineconeEnvironment, + PineconeKey: pineconeKey, + } + chunkOverlap := new(int64) + if !r.Configuration.Processing.ChunkOverlap.IsUnknown() && !r.Configuration.Processing.ChunkOverlap.IsNull() { + *chunkOverlap = r.Configuration.Processing.ChunkOverlap.ValueInt64() + } else { + chunkOverlap = nil + } + chunkSize := r.Configuration.Processing.ChunkSize.ValueInt64() + var metadataFields []string = nil + for _, metadataFieldsItem := range r.Configuration.Processing.MetadataFields { + metadataFields = append(metadataFields, metadataFieldsItem.ValueString()) + } + var textFields []string = nil + for _, textFieldsItem := range r.Configuration.Processing.TextFields { + textFields = append(textFields, textFieldsItem.ValueString()) + } + processing := shared.DestinationPineconeUpdateProcessingConfigModel{ + ChunkOverlap: chunkOverlap, + ChunkSize: chunkSize, + MetadataFields: metadataFields, + TextFields: textFields, + } + configuration := shared.DestinationPineconeUpdate{ + Embedding: embedding, + Indexing: indexing, + Processing: processing, + } + name := r.Name.ValueString() + workspaceID := r.WorkspaceID.ValueString() + out := shared.DestinationPineconePutRequest{ + Configuration: configuration, + Name: name, + WorkspaceID: workspaceID, + } + return &out +} + +func (r *DestinationPineconeResourceModel) ToDeleteSDKType() *shared.DestinationPineconeCreateRequest { + out := r.ToCreateSDKType() + return out +} + +func (r *DestinationPineconeResourceModel) RefreshFromGetResponse(resp *shared.DestinationResponse) { + r.DestinationID = types.StringValue(resp.DestinationID) + r.DestinationType = types.StringValue(resp.DestinationType) + r.Name = types.StringValue(resp.Name) + r.WorkspaceID = types.StringValue(resp.WorkspaceID) +} + +func (r *DestinationPineconeResourceModel) RefreshFromCreateResponse(resp *shared.DestinationResponse) { + r.RefreshFromGetResponse(resp) +} diff --git a/internal/provider/destination_snowflake_data_source.go b/internal/provider/destination_snowflake_data_source.go index 9ed2d64b4..495b4c89a 100755 --- a/internal/provider/destination_snowflake_data_source.go +++ b/internal/provider/destination_snowflake_data_source.go @@ -223,7 +223,7 @@ func (r *DestinationSnowflakeDataSource) Schema(ctx context.Context, 
req datasou }, "raw_data_schema": schema.StringAttribute{ Computed: true, - Description: `(Beta) The schema to write raw tables into`, + Description: `The schema to write raw tables into`, }, "role": schema.StringAttribute{ Computed: true, @@ -233,10 +233,6 @@ func (r *DestinationSnowflakeDataSource) Schema(ctx context.Context, req datasou Computed: true, Description: `Enter the name of the default schema`, }, - "use_1s1t_format": schema.BoolAttribute{ - Computed: true, - Description: `(Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program.`, - }, "username": schema.StringAttribute{ Computed: true, Description: `Enter the name of the user you want to use to access the database`, diff --git a/internal/provider/destination_snowflake_resource.go b/internal/provider/destination_snowflake_resource.go index aa67dddd9..8b76ae715 100755 --- a/internal/provider/destination_snowflake_resource.go +++ b/internal/provider/destination_snowflake_resource.go @@ -225,7 +225,7 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource. }, "raw_data_schema": schema.StringAttribute{ Optional: true, - Description: `(Beta) The schema to write raw tables into`, + Description: `The schema to write raw tables into`, }, "role": schema.StringAttribute{ Required: true, @@ -235,10 +235,6 @@ func (r *DestinationSnowflakeResource) Schema(ctx context.Context, req resource. Required: true, Description: `Enter the name of the default schema`, }, - "use_1s1t_format": schema.BoolAttribute{ - Optional: true, - Description: `(Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program.`, - }, "username": schema.StringAttribute{ Required: true, Description: `Enter the name of the user you want to use to access the database`, diff --git a/internal/provider/destination_snowflake_resource_sdk.go b/internal/provider/destination_snowflake_resource_sdk.go index f3ed7334a..e7e7fe834 100755 --- a/internal/provider/destination_snowflake_resource_sdk.go +++ b/internal/provider/destination_snowflake_resource_sdk.go @@ -108,12 +108,6 @@ func (r *DestinationSnowflakeResourceModel) ToCreateSDKType() *shared.Destinatio } role := r.Configuration.Role.ValueString() schema := r.Configuration.Schema.ValueString() - use1s1tFormat := new(bool) - if !r.Configuration.Use1s1tFormat.IsUnknown() && !r.Configuration.Use1s1tFormat.IsNull() { - *use1s1tFormat = r.Configuration.Use1s1tFormat.ValueBool() - } else { - use1s1tFormat = nil - } username := r.Configuration.Username.ValueString() warehouse := r.Configuration.Warehouse.ValueString() configuration := shared.DestinationSnowflake{ @@ -125,7 +119,6 @@ func (r *DestinationSnowflakeResourceModel) ToCreateSDKType() *shared.Destinatio RawDataSchema: rawDataSchema, Role: role, Schema: schema, - Use1s1tFormat: use1s1tFormat, Username: username, Warehouse: warehouse, } @@ -244,12 +237,6 @@ func (r *DestinationSnowflakeResourceModel) ToUpdateSDKType() *shared.Destinatio } role := r.Configuration.Role.ValueString() schema := r.Configuration.Schema.ValueString() - use1s1tFormat := new(bool) - if !r.Configuration.Use1s1tFormat.IsUnknown() && !r.Configuration.Use1s1tFormat.IsNull() { - *use1s1tFormat = r.Configuration.Use1s1tFormat.ValueBool() - } else { - use1s1tFormat = nil - } username := r.Configuration.Username.ValueString() warehouse := r.Configuration.Warehouse.ValueString() configuration := shared.DestinationSnowflakeUpdate{ @@ -260,7 +247,6 @@ func (r *DestinationSnowflakeResourceModel) ToUpdateSDKType() *shared.Destinatio 
RawDataSchema: rawDataSchema, Role: role, Schema: schema, - Use1s1tFormat: use1s1tFormat, Username: username, Warehouse: warehouse, } diff --git a/internal/provider/destination_typesense_data_source.go b/internal/provider/destination_typesense_data_source.go index 2320fe206..f60aef7f5 100755 --- a/internal/provider/destination_typesense_data_source.go +++ b/internal/provider/destination_typesense_data_source.go @@ -55,7 +55,7 @@ func (r *DestinationTypesenseDataSource) Schema(ctx context.Context, req datasou Computed: true, Description: `Typesense API Key`, }, - "batch_size": schema.StringAttribute{ + "batch_size": schema.Int64Attribute{ Computed: true, Description: `How many documents should be imported together. Default 1000`, }, diff --git a/internal/provider/destination_typesense_resource.go b/internal/provider/destination_typesense_resource.go index c10aa4f7d..48641bfd0 100755 --- a/internal/provider/destination_typesense_resource.go +++ b/internal/provider/destination_typesense_resource.go @@ -57,7 +57,7 @@ func (r *DestinationTypesenseResource) Schema(ctx context.Context, req resource. Required: true, Description: `Typesense API Key`, }, - "batch_size": schema.StringAttribute{ + "batch_size": schema.Int64Attribute{ Optional: true, Description: `How many documents should be imported together. Default 1000`, }, diff --git a/internal/provider/destination_typesense_resource_sdk.go b/internal/provider/destination_typesense_resource_sdk.go index 0862ebfe0..2b3b09610 100755 --- a/internal/provider/destination_typesense_resource_sdk.go +++ b/internal/provider/destination_typesense_resource_sdk.go @@ -9,9 +9,9 @@ import ( func (r *DestinationTypesenseResourceModel) ToCreateSDKType() *shared.DestinationTypesenseCreateRequest { apiKey := r.Configuration.APIKey.ValueString() - batchSize := new(string) + batchSize := new(int64) if !r.Configuration.BatchSize.IsUnknown() && !r.Configuration.BatchSize.IsNull() { - *batchSize = r.Configuration.BatchSize.ValueString() + *batchSize = r.Configuration.BatchSize.ValueInt64() } else { batchSize = nil } @@ -54,9 +54,9 @@ func (r *DestinationTypesenseResourceModel) ToGetSDKType() *shared.DestinationTy func (r *DestinationTypesenseResourceModel) ToUpdateSDKType() *shared.DestinationTypesensePutRequest { apiKey := r.Configuration.APIKey.ValueString() - batchSize := new(string) + batchSize := new(int64) if !r.Configuration.BatchSize.IsUnknown() && !r.Configuration.BatchSize.IsNull() { - *batchSize = r.Configuration.BatchSize.ValueString() + *batchSize = r.Configuration.BatchSize.ValueInt64() } else { batchSize = nil } diff --git a/internal/provider/provider.go b/internal/provider/provider.go index 6427a13cc..e490090df 100755 --- a/internal/provider/provider.go +++ b/internal/provider/provider.go @@ -126,10 +126,12 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou NewDestinationKeenResource, NewDestinationKinesisResource, NewDestinationLangchainResource, + NewDestinationMilvusResource, NewDestinationMongodbResource, NewDestinationMssqlResource, NewDestinationMysqlResource, NewDestinationOracleResource, + NewDestinationPineconeResource, NewDestinationPostgresResource, NewDestinationPubsubResource, NewDestinationRedisResource, @@ -175,7 +177,6 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou NewSourceConfigcatResource, NewSourceConfluenceResource, NewSourceConvexResource, - NewSourceDatadogResource, NewSourceDatascopeResource, NewSourceDelightedResource, NewSourceDixaResource, @@ -253,7 +254,6 
@@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou NewSourceOktaResource, NewSourceOmnisendResource, NewSourceOnesignalResource, - NewSourceOpenweatherResource, NewSourceOracleResource, NewSourceOrbResource, NewSourceOrbitResource, @@ -273,7 +273,6 @@ func (p *AirbyteProvider) Resources(ctx context.Context) []func() resource.Resou NewSourcePosthogResource, NewSourcePostmarkappResource, NewSourcePrestashopResource, - NewSourcePublicApisResource, NewSourcePunkAPIResource, NewSourcePypiResource, NewSourceQualarooResource, @@ -370,10 +369,12 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D NewDestinationKeenDataSource, NewDestinationKinesisDataSource, NewDestinationLangchainDataSource, + NewDestinationMilvusDataSource, NewDestinationMongodbDataSource, NewDestinationMssqlDataSource, NewDestinationMysqlDataSource, NewDestinationOracleDataSource, + NewDestinationPineconeDataSource, NewDestinationPostgresDataSource, NewDestinationPubsubDataSource, NewDestinationRedisDataSource, @@ -419,7 +420,6 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D NewSourceConfigcatDataSource, NewSourceConfluenceDataSource, NewSourceConvexDataSource, - NewSourceDatadogDataSource, NewSourceDatascopeDataSource, NewSourceDelightedDataSource, NewSourceDixaDataSource, @@ -497,7 +497,6 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D NewSourceOktaDataSource, NewSourceOmnisendDataSource, NewSourceOnesignalDataSource, - NewSourceOpenweatherDataSource, NewSourceOracleDataSource, NewSourceOrbDataSource, NewSourceOrbitDataSource, @@ -517,7 +516,6 @@ func (p *AirbyteProvider) DataSources(ctx context.Context) []func() datasource.D NewSourcePosthogDataSource, NewSourcePostmarkappDataSource, NewSourcePrestashopDataSource, - NewSourcePublicApisDataSource, NewSourcePunkAPIDataSource, NewSourcePypiDataSource, NewSourceQualarooDataSource, diff --git a/internal/provider/source_amazonads_data_source.go b/internal/provider/source_amazonads_data_source.go index 7024069ec..7642e27f6 100755 --- a/internal/provider/source_amazonads_data_source.go +++ b/internal/provider/source_amazonads_data_source.go @@ -73,10 +73,15 @@ func (r *SourceAmazonAdsDataSource) Schema(ctx context.Context, req datasource.S Computed: true, Description: `The amount of days to go back in time to get the updated data from Amazon Ads`, }, + "marketplace_ids": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.`, + }, "profiles": schema.ListAttribute{ Computed: true, ElementType: types.Int64Type, - Description: `Profile IDs you want to fetch data for. See docs for more details.`, + Description: `Profile IDs you want to fetch data for. See docs for more details. 
Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.`, }, "refresh_token": schema.StringAttribute{ Computed: true, diff --git a/internal/provider/source_amazonads_resource.go b/internal/provider/source_amazonads_resource.go index 33c71f958..7a38ce68f 100755 --- a/internal/provider/source_amazonads_resource.go +++ b/internal/provider/source_amazonads_resource.go @@ -75,10 +75,15 @@ func (r *SourceAmazonAdsResource) Schema(ctx context.Context, req resource.Schem Optional: true, Description: `The amount of days to go back in time to get the updated data from Amazon Ads`, }, + "marketplace_ids": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.`, + }, "profiles": schema.ListAttribute{ Optional: true, ElementType: types.Int64Type, - Description: `Profile IDs you want to fetch data for. See docs for more details.`, + Description: `Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID.`, }, "refresh_token": schema.StringAttribute{ Required: true, diff --git a/internal/provider/source_amazonads_resource_sdk.go b/internal/provider/source_amazonads_resource_sdk.go index dc76c1f6c..698480e47 100755 --- a/internal/provider/source_amazonads_resource_sdk.go +++ b/internal/provider/source_amazonads_resource_sdk.go @@ -22,6 +22,10 @@ func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAds } else { lookBackWindow = nil } + var marketplaceIds []string = nil + for _, marketplaceIdsItem := range r.Configuration.MarketplaceIds { + marketplaceIds = append(marketplaceIds, marketplaceIdsItem.ValueString()) + } var profiles []int64 = nil for _, profilesItem := range r.Configuration.Profiles { profiles = append(profiles, profilesItem.ValueInt64()) @@ -53,6 +57,7 @@ func (r *SourceAmazonAdsResourceModel) ToCreateSDKType() *shared.SourceAmazonAds ClientID: clientID, ClientSecret: clientSecret, LookBackWindow: lookBackWindow, + MarketplaceIds: marketplaceIds, Profiles: profiles, RefreshToken: refreshToken, Region: region, @@ -98,6 +103,10 @@ func (r *SourceAmazonAdsResourceModel) ToUpdateSDKType() *shared.SourceAmazonAds } else { lookBackWindow = nil } + var marketplaceIds []string = nil + for _, marketplaceIdsItem := range r.Configuration.MarketplaceIds { + marketplaceIds = append(marketplaceIds, marketplaceIdsItem.ValueString()) + } var profiles []int64 = nil for _, profilesItem := range r.Configuration.Profiles { profiles = append(profiles, profilesItem.ValueInt64()) @@ -128,6 +137,7 @@ func (r *SourceAmazonAdsResourceModel) ToUpdateSDKType() *shared.SourceAmazonAds ClientID: clientID, ClientSecret: clientSecret, LookBackWindow: lookBackWindow, + MarketplaceIds: marketplaceIds, Profiles: profiles, RefreshToken: refreshToken, Region: region, diff --git a/internal/provider/source_apifydataset_data_source.go b/internal/provider/source_apifydataset_data_source.go index 218f85d9e..50312adf6 100755 --- a/internal/provider/source_apifydataset_data_source.go +++ b/internal/provider/source_apifydataset_data_source.go @@ -69,6 +69,10 @@ func (r *SourceApifyDatasetDataSource) Schema(ctx context.Context, req datasourc }, Description: `must be one of ["apify-dataset"]`, }, + "token": schema.StringAttribute{ 
+ Computed: true, + Description: `Your application's Client Secret. You can find this value on the console integrations tab after you log in.`, + }, }, }, "name": schema.StringAttribute{ diff --git a/internal/provider/source_apifydataset_resource.go b/internal/provider/source_apifydataset_resource.go index 201312e70..95eba777a 100755 --- a/internal/provider/source_apifydataset_resource.go +++ b/internal/provider/source_apifydataset_resource.go @@ -59,7 +59,7 @@ func (r *SourceApifyDatasetResource) Schema(ctx context.Context, req resource.Sc Description: `If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false.`, }, "dataset_id": schema.StringAttribute{ - Required: true, + Optional: true, Description: `ID of the dataset you would like to load to Airbyte.`, }, "source_type": schema.StringAttribute{ @@ -71,6 +71,10 @@ func (r *SourceApifyDatasetResource) Schema(ctx context.Context, req resource.Sc }, Description: `must be one of ["apify-dataset"]`, }, + "token": schema.StringAttribute{ + Required: true, + Description: `Your application's Client Secret. You can find this value on the console integrations tab after you log in.`, + }, }, }, "name": schema.StringAttribute{ diff --git a/internal/provider/source_apifydataset_resource_sdk.go b/internal/provider/source_apifydataset_resource_sdk.go index 86bb9ab59..42e8502df 100755 --- a/internal/provider/source_apifydataset_resource_sdk.go +++ b/internal/provider/source_apifydataset_resource_sdk.go @@ -14,12 +14,19 @@ func (r *SourceApifyDatasetResourceModel) ToCreateSDKType() *shared.SourceApifyD } else { clean = nil } - datasetID := r.Configuration.DatasetID.ValueString() + datasetID := new(string) + if !r.Configuration.DatasetID.IsUnknown() && !r.Configuration.DatasetID.IsNull() { + *datasetID = r.Configuration.DatasetID.ValueString() + } else { + datasetID = nil + } sourceType := shared.SourceApifyDatasetApifyDataset(r.Configuration.SourceType.ValueString()) + token := r.Configuration.Token.ValueString() configuration := shared.SourceApifyDataset{ Clean: clean, DatasetID: datasetID, SourceType: sourceType, + Token: token, } name := r.Name.ValueString() secretID := new(string) @@ -50,10 +57,17 @@ func (r *SourceApifyDatasetResourceModel) ToUpdateSDKType() *shared.SourceApifyD } else { clean = nil } - datasetID := r.Configuration.DatasetID.ValueString() + datasetID := new(string) + if !r.Configuration.DatasetID.IsUnknown() && !r.Configuration.DatasetID.IsNull() { + *datasetID = r.Configuration.DatasetID.ValueString() + } else { + datasetID = nil + } + token := r.Configuration.Token.ValueString() configuration := shared.SourceApifyDatasetUpdate{ Clean: clean, DatasetID: datasetID, + Token: token, } name := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() diff --git a/internal/provider/source_auth0_data_source.go b/internal/provider/source_auth0_data_source.go index 5b700044b..5d2897b1b 100755 --- a/internal/provider/source_auth0_data_source.go +++ b/internal/provider/source_auth0_data_source.go @@ -162,6 +162,10 @@ func (r *SourceAuth0DataSource) Schema(ctx context.Context, req datasource.Schem }, Description: `must be one of ["auth0"]`, }, + "start_date": schema.StringAttribute{ + Computed: true, + Description: `UTC date and time in the format 2017-01-25T00:00:00Z.
Any data before this date will not be replicated.`, + }, }, }, "name": schema.StringAttribute{ diff --git a/internal/provider/source_auth0_resource.go b/internal/provider/source_auth0_resource.go index 82983954d..257fe4860 100755 --- a/internal/provider/source_auth0_resource.go +++ b/internal/provider/source_auth0_resource.go @@ -164,6 +164,10 @@ func (r *SourceAuth0Resource) Schema(ctx context.Context, req resource.SchemaReq }, Description: `must be one of ["auth0"]`, }, + "start_date": schema.StringAttribute{ + Optional: true, + Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated.`, + }, }, }, "name": schema.StringAttribute{ diff --git a/internal/provider/source_auth0_resource_sdk.go b/internal/provider/source_auth0_resource_sdk.go index ca42b7e82..c898820df 100755 --- a/internal/provider/source_auth0_resource_sdk.go +++ b/internal/provider/source_auth0_resource_sdk.go @@ -43,10 +43,17 @@ func (r *SourceAuth0ResourceModel) ToCreateSDKType() *shared.SourceAuth0CreateRe } } sourceType := shared.SourceAuth0Auth0(r.Configuration.SourceType.ValueString()) + startDate := new(string) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate = r.Configuration.StartDate.ValueString() + } else { + startDate = nil + } configuration := shared.SourceAuth0{ BaseURL: baseURL, Credentials: credentials, SourceType: sourceType, + StartDate: startDate, } name := r.Name.ValueString() secretID := new(string) @@ -105,9 +112,16 @@ func (r *SourceAuth0ResourceModel) ToUpdateSDKType() *shared.SourceAuth0PutReque SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken: sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken, } } + startDate := new(string) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate = r.Configuration.StartDate.ValueString() + } else { + startDate = nil + } configuration := shared.SourceAuth0Update{ BaseURL: baseURL, Credentials: credentials, + StartDate: startDate, } name := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() diff --git a/internal/provider/source_datadog_data_source.go b/internal/provider/source_datadog_data_source.go deleted file mode 100755 index 86919c484..000000000 --- a/internal/provider/source_datadog_data_source.go +++ /dev/null @@ -1,216 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import ( - "airbyte/internal/sdk" - "airbyte/internal/sdk/pkg/models/operations" - "context" - "fmt" - - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -// Ensure provider defined types fully satisfy framework interfaces. -var _ datasource.DataSource = &SourceDatadogDataSource{} -var _ datasource.DataSourceWithConfigure = &SourceDatadogDataSource{} - -func NewSourceDatadogDataSource() datasource.DataSource { - return &SourceDatadogDataSource{} -} - -// SourceDatadogDataSource is the data source implementation. -type SourceDatadogDataSource struct { - client *sdk.SDK -} - -// SourceDatadogDataSourceModel describes the data model. 
-type SourceDatadogDataSourceModel struct { - Configuration SourceDatadog `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - WorkspaceID types.String `tfsdk:"workspace_id"` -} - -// Metadata returns the data source type name. -func (r *SourceDatadogDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_source_datadog" -} - -// Schema defines the schema for the data source. -func (r *SourceDatadogDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "SourceDatadog DataSource", - - Attributes: map[string]schema.Attribute{ - "configuration": schema.SingleNestedAttribute{ - Computed: true, - Attributes: map[string]schema.Attribute{ - "api_key": schema.StringAttribute{ - Computed: true, - Description: `Datadog API key`, - }, - "application_key": schema.StringAttribute{ - Computed: true, - Description: `Datadog application key`, - }, - "end_date": schema.StringAttribute{ - Computed: true, - Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs.`, - }, - "max_records_per_request": schema.Int64Attribute{ - Computed: true, - Description: `Maximum number of records to collect per request.`, - }, - "queries": schema.ListNestedAttribute{ - Computed: true, - NestedObject: schema.NestedAttributeObject{ - Attributes: map[string]schema.Attribute{ - "data_source": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "metrics", - "cloud_cost", - "logs", - "rum", - ), - }, - MarkdownDescription: `must be one of ["metrics", "cloud_cost", "logs", "rum"]` + "\n" + - `A data source that is powered by the platform.`, - }, - "name": schema.StringAttribute{ - Computed: true, - Description: `The variable name for use in queries.`, - }, - "query": schema.StringAttribute{ - Computed: true, - Description: `A classic query string.`, - }, - }, - }, - Description: `List of queries to be run and used as inputs.`, - }, - "query": schema.StringAttribute{ - Computed: true, - Description: `The search query. This just applies to Incremental syncs. If empty, it'll collect all logs.`, - }, - "site": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "datadoghq.com", - "us3.datadoghq.com", - "us5.datadoghq.com", - "datadoghq.eu", - "ddog-gov.com", - ), - }, - MarkdownDescription: `must be one of ["datadoghq.com", "us3.datadoghq.com", "us5.datadoghq.com", "datadoghq.eu", "ddog-gov.com"]` + "\n" + - `The site where Datadog data resides in.`, - }, - "source_type": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "datadog", - ), - }, - Description: `must be one of ["datadog"]`, - }, - "start_date": schema.StringAttribute{ - Computed: true, - Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
This just applies to Incremental syncs.`, - }, - }, - }, - "name": schema.StringAttribute{ - Computed: true, - }, - "secret_id": schema.StringAttribute{ - Optional: true, - Description: `Optional secretID obtained through the public API OAuth redirect flow.`, - }, - "source_id": schema.StringAttribute{ - Required: true, - }, - "workspace_id": schema.StringAttribute{ - Computed: true, - }, - }, - } -} - -func (r *SourceDatadogDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - client, ok := req.ProviderData.(*sdk.SDK) - - if !ok { - resp.Diagnostics.AddError( - "Unexpected DataSource Configure Type", - fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - - return - } - - r.client = client -} - -func (r *SourceDatadogDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data *SourceDatadogDataSourceModel - var item types.Object - - resp.Diagnostics.Append(req.Config.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.GetSourceDatadogRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.GetSourceDatadog(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromGetResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} diff --git a/internal/provider/source_datadog_resource.go b/internal/provider/source_datadog_resource.go deleted file mode 100755 index cbee57a45..000000000 --- a/internal/provider/source_datadog_resource.go +++ /dev/null @@ -1,382 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import ( - "airbyte/internal/sdk" - "context" - "fmt" - - speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier" - "airbyte/internal/sdk/pkg/models/operations" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -// Ensure provider defined types fully satisfy framework interfaces. -var _ resource.Resource = &SourceDatadogResource{} -var _ resource.ResourceWithImportState = &SourceDatadogResource{} - -func NewSourceDatadogResource() resource.Resource { - return &SourceDatadogResource{} -} - -// SourceDatadogResource defines the resource implementation. -type SourceDatadogResource struct { - client *sdk.SDK -} - -// SourceDatadogResourceModel describes the resource data model. -type SourceDatadogResourceModel struct { - Configuration SourceDatadog `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - SourceType types.String `tfsdk:"source_type"` - WorkspaceID types.String `tfsdk:"workspace_id"` -} - -func (r *SourceDatadogResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_source_datadog" -} - -func (r *SourceDatadogResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "SourceDatadog Resource", - - Attributes: map[string]schema.Attribute{ - "configuration": schema.SingleNestedAttribute{ - Required: true, - Attributes: map[string]schema.Attribute{ - "api_key": schema.StringAttribute{ - Required: true, - Description: `Datadog API key`, - }, - "application_key": schema.StringAttribute{ - Required: true, - Description: `Datadog application key`, - }, - "end_date": schema.StringAttribute{ - Optional: true, - Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. 
This just applies to Incremental syncs.`, - }, - "max_records_per_request": schema.Int64Attribute{ - Optional: true, - Description: `Maximum number of records to collect per request.`, - }, - "queries": schema.ListNestedAttribute{ - Optional: true, - NestedObject: schema.NestedAttributeObject{ - Attributes: map[string]schema.Attribute{ - "data_source": schema.StringAttribute{ - Required: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "metrics", - "cloud_cost", - "logs", - "rum", - ), - }, - MarkdownDescription: `must be one of ["metrics", "cloud_cost", "logs", "rum"]` + "\n" + - `A data source that is powered by the platform.`, - }, - "name": schema.StringAttribute{ - Required: true, - Description: `The variable name for use in queries.`, - }, - "query": schema.StringAttribute{ - Required: true, - Description: `A classic query string.`, - }, - }, - }, - Description: `List of queries to be run and used as inputs.`, - }, - "query": schema.StringAttribute{ - Optional: true, - Description: `The search query. This just applies to Incremental syncs. If empty, it'll collect all logs.`, - }, - "site": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "datadoghq.com", - "us3.datadoghq.com", - "us5.datadoghq.com", - "datadoghq.eu", - "ddog-gov.com", - ), - }, - MarkdownDescription: `must be one of ["datadoghq.com", "us3.datadoghq.com", "us5.datadoghq.com", "datadoghq.eu", "ddog-gov.com"]` + "\n" + - `The site where Datadog data resides in.`, - }, - "source_type": schema.StringAttribute{ - Required: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "datadog", - ), - }, - Description: `must be one of ["datadog"]`, - }, - "start_date": schema.StringAttribute{ - Optional: true, - Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs.`, - }, - }, - }, - "name": schema.StringAttribute{ - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - Required: true, - }, - "secret_id": schema.StringAttribute{ - Optional: true, - Description: `Optional secretID obtained through the public API OAuth redirect flow.`, - }, - "source_id": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - }, - "source_type": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - }, - "workspace_id": schema.StringAttribute{ - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - Required: true, - }, - }, - } -} - -func (r *SourceDatadogResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - client, ok := req.ProviderData.(*sdk.SDK) - - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - - return - } - - r.client = client -} - -func (r *SourceDatadogResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data *SourceDatadogResourceModel - var item types.Object - - resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...) 
- if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - request := *data.ToCreateSDKType() - res, err := r.client.Sources.CreateSourceDatadog(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromCreateResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourceDatadogResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data *SourceDatadogResourceModel - var item types.Object - - resp.Diagnostics.Append(req.State.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.GetSourceDatadogRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.GetSourceDatadog(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromGetResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
-} - -func (r *SourceDatadogResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data *SourceDatadogResourceModel - merge(ctx, req, resp, &data) - if resp.Diagnostics.HasError() { - return - } - - sourceDatadogPutRequest := data.ToUpdateSDKType() - sourceID := data.SourceID.ValueString() - request := operations.PutSourceDatadogRequest{ - SourceDatadogPutRequest: sourceDatadogPutRequest, - SourceID: sourceID, - } - res, err := r.client.Sources.PutSourceDatadog(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - sourceId1 := data.SourceID.ValueString() - getRequest := operations.GetSourceDatadogRequest{ - SourceID: sourceId1, - } - getResponse, err := r.client.Sources.GetSourceDatadog(ctx, getRequest) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if getResponse == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse)) - return - } - if getResponse.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse)) - return - } - if getResponse.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse)) - return - } - data.RefreshFromGetResponse(getResponse.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourceDatadogResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data *SourceDatadogResourceModel - var item types.Object - - resp.Diagnostics.Append(req.State.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.DeleteSourceDatadogRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.DeleteSourceDatadog(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. 
Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - -} - -func (r *SourceDatadogResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp) -} diff --git a/internal/provider/source_datadog_resource_sdk.go b/internal/provider/source_datadog_resource_sdk.go deleted file mode 100755 index 8fac33f06..000000000 --- a/internal/provider/source_datadog_resource_sdk.go +++ /dev/null @@ -1,166 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "github.com/hashicorp/terraform-plugin-framework/types" -) - -func (r *SourceDatadogResourceModel) ToCreateSDKType() *shared.SourceDatadogCreateRequest { - apiKey := r.Configuration.APIKey.ValueString() - applicationKey := r.Configuration.ApplicationKey.ValueString() - endDate := new(string) - if !r.Configuration.EndDate.IsUnknown() && !r.Configuration.EndDate.IsNull() { - *endDate = r.Configuration.EndDate.ValueString() - } else { - endDate = nil - } - maxRecordsPerRequest := new(int64) - if !r.Configuration.MaxRecordsPerRequest.IsUnknown() && !r.Configuration.MaxRecordsPerRequest.IsNull() { - *maxRecordsPerRequest = r.Configuration.MaxRecordsPerRequest.ValueInt64() - } else { - maxRecordsPerRequest = nil - } - var queries []shared.SourceDatadogQueries = nil - for _, queriesItem := range r.Configuration.Queries { - dataSource := shared.SourceDatadogQueriesDataSource(queriesItem.DataSource.ValueString()) - name := queriesItem.Name.ValueString() - query := queriesItem.Query.ValueString() - queries = append(queries, shared.SourceDatadogQueries{ - DataSource: dataSource, - Name: name, - Query: query, - }) - } - query1 := new(string) - if !r.Configuration.Query.IsUnknown() && !r.Configuration.Query.IsNull() { - *query1 = r.Configuration.Query.ValueString() - } else { - query1 = nil - } - site := new(shared.SourceDatadogSite) - if !r.Configuration.Site.IsUnknown() && !r.Configuration.Site.IsNull() { - *site = shared.SourceDatadogSite(r.Configuration.Site.ValueString()) - } else { - site = nil - } - sourceType := shared.SourceDatadogDatadog(r.Configuration.SourceType.ValueString()) - startDate := new(string) - if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { - *startDate = r.Configuration.StartDate.ValueString() - } else { - startDate = nil - } - configuration := shared.SourceDatadog{ - APIKey: apiKey, - ApplicationKey: applicationKey, - EndDate: endDate, - MaxRecordsPerRequest: maxRecordsPerRequest, - Queries: queries, - Query: query1, - Site: site, - SourceType: sourceType, - StartDate: startDate, - } - name1 := r.Name.ValueString() - secretID := new(string) - if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() { - *secretID = r.SecretID.ValueString() - } else { - secretID = nil - } - workspaceID := r.WorkspaceID.ValueString() - out := shared.SourceDatadogCreateRequest{ - Configuration: configuration, - Name: name1, - SecretID: secretID, - WorkspaceID: workspaceID, - } - return &out -} - -func (r *SourceDatadogResourceModel) ToGetSDKType() *shared.SourceDatadogCreateRequest { - out := r.ToCreateSDKType() - return out -} - -func (r *SourceDatadogResourceModel) ToUpdateSDKType() *shared.SourceDatadogPutRequest { - apiKey := r.Configuration.APIKey.ValueString() - applicationKey := r.Configuration.ApplicationKey.ValueString() - endDate := new(string) - if 
!r.Configuration.EndDate.IsUnknown() && !r.Configuration.EndDate.IsNull() { - *endDate = r.Configuration.EndDate.ValueString() - } else { - endDate = nil - } - maxRecordsPerRequest := new(int64) - if !r.Configuration.MaxRecordsPerRequest.IsUnknown() && !r.Configuration.MaxRecordsPerRequest.IsNull() { - *maxRecordsPerRequest = r.Configuration.MaxRecordsPerRequest.ValueInt64() - } else { - maxRecordsPerRequest = nil - } - var queries []shared.SourceDatadogUpdateQueries = nil - for _, queriesItem := range r.Configuration.Queries { - dataSource := shared.SourceDatadogUpdateQueriesDataSource(queriesItem.DataSource.ValueString()) - name := queriesItem.Name.ValueString() - query := queriesItem.Query.ValueString() - queries = append(queries, shared.SourceDatadogUpdateQueries{ - DataSource: dataSource, - Name: name, - Query: query, - }) - } - query1 := new(string) - if !r.Configuration.Query.IsUnknown() && !r.Configuration.Query.IsNull() { - *query1 = r.Configuration.Query.ValueString() - } else { - query1 = nil - } - site := new(shared.SourceDatadogUpdateSite) - if !r.Configuration.Site.IsUnknown() && !r.Configuration.Site.IsNull() { - *site = shared.SourceDatadogUpdateSite(r.Configuration.Site.ValueString()) - } else { - site = nil - } - startDate := new(string) - if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { - *startDate = r.Configuration.StartDate.ValueString() - } else { - startDate = nil - } - configuration := shared.SourceDatadogUpdate{ - APIKey: apiKey, - ApplicationKey: applicationKey, - EndDate: endDate, - MaxRecordsPerRequest: maxRecordsPerRequest, - Queries: queries, - Query: query1, - Site: site, - StartDate: startDate, - } - name1 := r.Name.ValueString() - workspaceID := r.WorkspaceID.ValueString() - out := shared.SourceDatadogPutRequest{ - Configuration: configuration, - Name: name1, - WorkspaceID: workspaceID, - } - return &out -} - -func (r *SourceDatadogResourceModel) ToDeleteSDKType() *shared.SourceDatadogCreateRequest { - out := r.ToCreateSDKType() - return out -} - -func (r *SourceDatadogResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) { - r.Name = types.StringValue(resp.Name) - r.SourceID = types.StringValue(resp.SourceID) - r.SourceType = types.StringValue(resp.SourceType) - r.WorkspaceID = types.StringValue(resp.WorkspaceID) -} - -func (r *SourceDatadogResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) { - r.RefreshFromGetResponse(resp) -} diff --git a/internal/provider/source_googleads_data_source.go b/internal/provider/source_googleads_data_source.go index a2d407cbe..db2bb51ab 100755 --- a/internal/provider/source_googleads_data_source.go +++ b/internal/provider/source_googleads_data_source.go @@ -106,7 +106,7 @@ func (r *SourceGoogleAdsDataSource) Schema(ctx context.Context, req datasource.S Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)`, }, "login_customer_id": schema.StringAttribute{ Computed: true, @@ -126,7 +126,7 @@ func (r *SourceGoogleAdsDataSource) Schema(ctx context.Context, req datasource.S Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. 
Any data before this date will not be replicated. (Default value of two years ago is used if not set)`, }, }, }, diff --git a/internal/provider/source_googleads_resource.go b/internal/provider/source_googleads_resource.go index f93ec2a12..03201a523 100755 --- a/internal/provider/source_googleads_resource.go +++ b/internal/provider/source_googleads_resource.go @@ -108,7 +108,7 @@ func (r *SourceGoogleAdsResource) Schema(ctx context.Context, req resource.Schem Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set)`, }, "login_customer_id": schema.StringAttribute{ Optional: true, @@ -124,11 +124,11 @@ func (r *SourceGoogleAdsResource) Schema(ctx context.Context, req resource.Schem Description: `must be one of ["google-ads"]`, }, "start_date": schema.StringAttribute{ - Required: true, + Optional: true, Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set)`, }, }, }, diff --git a/internal/provider/source_googleads_resource_sdk.go b/internal/provider/source_googleads_resource_sdk.go index d59e28d96..1e5453a54 100755 --- a/internal/provider/source_googleads_resource_sdk.go +++ b/internal/provider/source_googleads_resource_sdk.go @@ -55,7 +55,12 @@ func (r *SourceGoogleAdsResourceModel) ToCreateSDKType() *shared.SourceGoogleAds loginCustomerID = nil } sourceType := shared.SourceGoogleAdsGoogleAds(r.Configuration.SourceType.ValueString()) - startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString()) + startDate := new(customTypes.Date) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceGoogleAds{ ConversionWindowDays: conversionWindowDays, Credentials: credentials, @@ -134,7 +139,12 @@ func (r *SourceGoogleAdsResourceModel) ToUpdateSDKType() *shared.SourceGoogleAds } else { loginCustomerID = nil } - startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString()) + startDate := new(customTypes.Date) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceGoogleAdsUpdate{ ConversionWindowDays: conversionWindowDays, Credentials: credentials, diff --git a/internal/provider/source_googlesearchconsole_data_source.go b/internal/provider/source_googlesearchconsole_data_source.go index d5e8ec934..22daef8b9 100755 --- a/internal/provider/source_googlesearchconsole_data_source.go +++ b/internal/provider/source_googlesearchconsole_data_source.go @@ -167,7 +167,24 @@ func (r *SourceGoogleSearchConsoleDataSource) Schema(ctx context.Context, req da }, "custom_reports": schema.StringAttribute{ Computed: true, - Description: `A JSON array describing the custom reports you want to sync from Google Search Console. 
See the docs for more information about the exact format you can use to fill out this field.`, + Description: `(DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.`, + }, + "custom_reports_array": schema.ListNestedAttribute{ + Computed: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "dimensions": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A list of dimensions (country, date, device, page, query)`, + }, + "name": schema.StringAttribute{ + Computed: true, + Description: `The name of the custom report, this name would be used as stream name`, + }, + }, + }, + Description: `You can add your Custom Analytics report by creating one.`, }, "data_state": schema.StringAttribute{ Computed: true, @@ -178,19 +195,19 @@ func (r *SourceGoogleSearchConsoleDataSource) Schema(ctx context.Context, req da ), }, MarkdownDescription: `must be one of ["final", "all"]` + "\n" + - `If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data.`, + `If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.`, }, "end_date": schema.StringAttribute{ Computed: true, Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field.`, + Description: `UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.`, }, "site_urls": schema.ListAttribute{ Computed: true, ElementType: types.StringType, - Description: `The URLs of the website property attached to your GSC account. Read more here.`, + Description: `The URLs of the website property attached to your GSC account. Learn more about properties here.`, }, "source_type": schema.StringAttribute{ Computed: true, @@ -206,7 +223,7 @@ func (r *SourceGoogleSearchConsoleDataSource) Schema(ctx context.Context, req da Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format 2017-01-25. Any data before this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated.`, }, }, }, diff --git a/internal/provider/source_googlesearchconsole_resource.go b/internal/provider/source_googlesearchconsole_resource.go index 3762d0391..9152d546a 100755 --- a/internal/provider/source_googlesearchconsole_resource.go +++ b/internal/provider/source_googlesearchconsole_resource.go @@ -169,7 +169,24 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso }, "custom_reports": schema.StringAttribute{ Optional: true, - Description: `A JSON array describing the custom reports you want to sync from Google Search Console. 
See the docs for more information about the exact format you can use to fill out this field.`, + Description: `(DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports.`, + }, + "custom_reports_array": schema.ListNestedAttribute{ + Optional: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "dimensions": schema.ListAttribute{ + Required: true, + ElementType: types.StringType, + Description: `A list of dimensions (country, date, device, page, query)`, + }, + "name": schema.StringAttribute{ + Required: true, + Description: `The name of the custom report, this name would be used as stream name`, + }, + }, + }, + Description: `You can add your Custom Analytics report by creating one.`, }, "data_state": schema.StringAttribute{ Optional: true, @@ -180,19 +197,19 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso ), }, MarkdownDescription: `must be one of ["final", "all"]` + "\n" + - `If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data.`, + `If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation.`, }, "end_date": schema.StringAttribute{ Optional: true, Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field.`, + Description: `UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward.`, }, "site_urls": schema.ListAttribute{ Required: true, ElementType: types.StringType, - Description: `The URLs of the website property attached to your GSC account. Read more here.`, + Description: `The URLs of the website property attached to your GSC account. Learn more about properties here.`, }, "source_type": schema.StringAttribute{ Required: true, @@ -204,11 +221,11 @@ func (r *SourceGoogleSearchConsoleResource) Schema(ctx context.Context, req reso Description: `must be one of ["google-search-console"]`, }, "start_date": schema.StringAttribute{ - Required: true, + Optional: true, Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format 2017-01-25. Any data before this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. 
Any data before this date will not be replicated.`, }, }, }, diff --git a/internal/provider/source_googlesearchconsole_resource_sdk.go b/internal/provider/source_googlesearchconsole_resource_sdk.go index d52da6c99..3aa2b46fb 100755 --- a/internal/provider/source_googlesearchconsole_resource_sdk.go +++ b/internal/provider/source_googlesearchconsole_resource_sdk.go @@ -57,9 +57,21 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc } else { customReports = nil } - dataState := new(shared.SourceGoogleSearchConsoleDataState) + var customReportsArray []shared.SourceGoogleSearchConsoleCustomReportConfig = nil + for _, customReportsArrayItem := range r.Configuration.CustomReportsArray { + var dimensions []shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums = nil + for _, dimensionsItem := range customReportsArrayItem.Dimensions { + dimensions = append(dimensions, shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums(dimensionsItem.ValueString())) + } + name := customReportsArrayItem.Name.ValueString() + customReportsArray = append(customReportsArray, shared.SourceGoogleSearchConsoleCustomReportConfig{ + Dimensions: dimensions, + Name: name, + }) + } + dataState := new(shared.SourceGoogleSearchConsoleDataFreshness) if !r.Configuration.DataState.IsUnknown() && !r.Configuration.DataState.IsNull() { - *dataState = shared.SourceGoogleSearchConsoleDataState(r.Configuration.DataState.ValueString()) + *dataState = shared.SourceGoogleSearchConsoleDataFreshness(r.Configuration.DataState.ValueString()) } else { dataState = nil } @@ -74,17 +86,23 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc siteUrls = append(siteUrls, siteUrlsItem.ValueString()) } sourceType := shared.SourceGoogleSearchConsoleGoogleSearchConsole(r.Configuration.SourceType.ValueString()) - startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString()) + startDate := new(customTypes.Date) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceGoogleSearchConsole{ - Authorization: authorization, - CustomReports: customReports, - DataState: dataState, - EndDate: endDate, - SiteUrls: siteUrls, - SourceType: sourceType, - StartDate: startDate, - } - name := r.Name.ValueString() + Authorization: authorization, + CustomReports: customReports, + CustomReportsArray: customReportsArray, + DataState: dataState, + EndDate: endDate, + SiteUrls: siteUrls, + SourceType: sourceType, + StartDate: startDate, + } + name1 := r.Name.ValueString() secretID := new(string) if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() { *secretID = r.SecretID.ValueString() @@ -94,7 +112,7 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToCreateSDKType() *shared.Sourc workspaceID := r.WorkspaceID.ValueString() out := shared.SourceGoogleSearchConsoleCreateRequest{ Configuration: configuration, - Name: name, + Name: name1, SecretID: secretID, WorkspaceID: workspaceID, } @@ -155,9 +173,21 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToUpdateSDKType() *shared.Sourc } else { customReports = nil } - dataState := new(shared.SourceGoogleSearchConsoleUpdateDataState) + var customReportsArray []shared.SourceGoogleSearchConsoleUpdateCustomReportConfig = nil + for _, customReportsArrayItem := range r.Configuration.CustomReportsArray { + var dimensions 
[]shared.SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = nil + for _, dimensionsItem := range customReportsArrayItem.Dimensions { + dimensions = append(dimensions, shared.SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums(dimensionsItem.ValueString())) + } + name := customReportsArrayItem.Name.ValueString() + customReportsArray = append(customReportsArray, shared.SourceGoogleSearchConsoleUpdateCustomReportConfig{ + Dimensions: dimensions, + Name: name, + }) + } + dataState := new(shared.SourceGoogleSearchConsoleUpdateDataFreshness) if !r.Configuration.DataState.IsUnknown() && !r.Configuration.DataState.IsNull() { - *dataState = shared.SourceGoogleSearchConsoleUpdateDataState(r.Configuration.DataState.ValueString()) + *dataState = shared.SourceGoogleSearchConsoleUpdateDataFreshness(r.Configuration.DataState.ValueString()) } else { dataState = nil } @@ -171,20 +201,26 @@ func (r *SourceGoogleSearchConsoleResourceModel) ToUpdateSDKType() *shared.Sourc for _, siteUrlsItem := range r.Configuration.SiteUrls { siteUrls = append(siteUrls, siteUrlsItem.ValueString()) } - startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString()) + startDate := new(customTypes.Date) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceGoogleSearchConsoleUpdate{ - Authorization: authorization, - CustomReports: customReports, - DataState: dataState, - EndDate: endDate, - SiteUrls: siteUrls, - StartDate: startDate, - } - name := r.Name.ValueString() + Authorization: authorization, + CustomReports: customReports, + CustomReportsArray: customReportsArray, + DataState: dataState, + EndDate: endDate, + SiteUrls: siteUrls, + StartDate: startDate, + } + name1 := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() out := shared.SourceGoogleSearchConsolePutRequest{ Configuration: configuration, - Name: name, + Name: name1, WorkspaceID: workspaceID, } return &out diff --git a/internal/provider/source_googlesheets_data_source.go b/internal/provider/source_googlesheets_data_source.go index dfc29817d..0c561e58a 100755 --- a/internal/provider/source_googlesheets_data_source.go +++ b/internal/provider/source_googlesheets_data_source.go @@ -158,10 +158,6 @@ func (r *SourceGoogleSheetsDataSource) Schema(ctx context.Context, req datasourc Computed: true, Description: `Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based.`, }, - "row_batch_size": schema.Int64Attribute{ - Computed: true, - Description: `The number of rows fetched when making a Google Sheet API call. Defaults to 200.`, - }, "source_type": schema.StringAttribute{ Computed: true, Validators: []validator.String{ diff --git a/internal/provider/source_googlesheets_resource.go b/internal/provider/source_googlesheets_resource.go index aecf8b30a..11413f548 100755 --- a/internal/provider/source_googlesheets_resource.go +++ b/internal/provider/source_googlesheets_resource.go @@ -160,10 +160,6 @@ func (r *SourceGoogleSheetsResource) Schema(ctx context.Context, req resource.Sc Optional: true, Description: `Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. 
Enable this option if your destination is SQL-based.`, }, - "row_batch_size": schema.Int64Attribute{ - Optional: true, - Description: `The number of rows fetched when making a Google Sheet API call. Defaults to 200.`, - }, "source_type": schema.StringAttribute{ Required: true, Validators: []validator.String{ diff --git a/internal/provider/source_googlesheets_resource_sdk.go b/internal/provider/source_googlesheets_resource_sdk.go index 574ac5eac..d217bd4dc 100755 --- a/internal/provider/source_googlesheets_resource_sdk.go +++ b/internal/provider/source_googlesheets_resource_sdk.go @@ -47,18 +47,11 @@ func (r *SourceGoogleSheetsResourceModel) ToCreateSDKType() *shared.SourceGoogle } else { namesConversion = nil } - rowBatchSize := new(int64) - if !r.Configuration.RowBatchSize.IsUnknown() && !r.Configuration.RowBatchSize.IsNull() { - *rowBatchSize = r.Configuration.RowBatchSize.ValueInt64() - } else { - rowBatchSize = nil - } sourceType := shared.SourceGoogleSheetsGoogleSheets(r.Configuration.SourceType.ValueString()) spreadsheetID := r.Configuration.SpreadsheetID.ValueString() configuration := shared.SourceGoogleSheets{ Credentials: credentials, NamesConversion: namesConversion, - RowBatchSize: rowBatchSize, SourceType: sourceType, SpreadsheetID: spreadsheetID, } @@ -124,17 +117,10 @@ func (r *SourceGoogleSheetsResourceModel) ToUpdateSDKType() *shared.SourceGoogle } else { namesConversion = nil } - rowBatchSize := new(int64) - if !r.Configuration.RowBatchSize.IsUnknown() && !r.Configuration.RowBatchSize.IsNull() { - *rowBatchSize = r.Configuration.RowBatchSize.ValueInt64() - } else { - rowBatchSize = nil - } spreadsheetID := r.Configuration.SpreadsheetID.ValueString() configuration := shared.SourceGoogleSheetsUpdate{ Credentials: credentials, NamesConversion: namesConversion, - RowBatchSize: rowBatchSize, SpreadsheetID: spreadsheetID, } name := r.Name.ValueString() diff --git a/internal/provider/source_insightly_resource_sdk.go b/internal/provider/source_insightly_resource_sdk.go index bc9f1d8f4..052dbae7a 100755 --- a/internal/provider/source_insightly_resource_sdk.go +++ b/internal/provider/source_insightly_resource_sdk.go @@ -9,8 +9,18 @@ import ( func (r *SourceInsightlyResourceModel) ToCreateSDKType() *shared.SourceInsightlyCreateRequest { sourceType := shared.SourceInsightlyInsightly(r.Configuration.SourceType.ValueString()) - startDate := r.Configuration.StartDate.ValueString() - token := r.Configuration.Token.ValueString() + startDate := new(string) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate = r.Configuration.StartDate.ValueString() + } else { + startDate = nil + } + token := new(string) + if !r.Configuration.Token.IsUnknown() && !r.Configuration.Token.IsNull() { + *token = r.Configuration.Token.ValueString() + } else { + token = nil + } configuration := shared.SourceInsightly{ SourceType: sourceType, StartDate: startDate, @@ -39,8 +49,18 @@ func (r *SourceInsightlyResourceModel) ToGetSDKType() *shared.SourceInsightlyCre } func (r *SourceInsightlyResourceModel) ToUpdateSDKType() *shared.SourceInsightlyPutRequest { - startDate := r.Configuration.StartDate.ValueString() - token := r.Configuration.Token.ValueString() + startDate := new(string) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate = r.Configuration.StartDate.ValueString() + } else { + startDate = nil + } + token := new(string) + if !r.Configuration.Token.IsUnknown() && !r.Configuration.Token.IsNull() { + *token = 
r.Configuration.Token.ValueString() + } else { + token = nil + } configuration := shared.SourceInsightlyUpdate{ StartDate: startDate, Token: token, diff --git a/internal/provider/source_intercom_data_source.go b/internal/provider/source_intercom_data_source.go index 98e8886cc..83a103ec4 100755 --- a/internal/provider/source_intercom_data_source.go +++ b/internal/provider/source_intercom_data_source.go @@ -57,6 +57,14 @@ func (r *SourceIntercomDataSource) Schema(ctx context.Context, req datasource.Sc Computed: true, Description: `Access token for making authenticated requests. See the Intercom docs for more information.`, }, + "client_id": schema.StringAttribute{ + Computed: true, + Description: `Client Id for your Intercom application.`, + }, + "client_secret": schema.StringAttribute{ + Computed: true, + Description: `Client Secret for your Intercom application.`, + }, "source_type": schema.StringAttribute{ Computed: true, Validators: []validator.String{ diff --git a/internal/provider/source_intercom_resource.go b/internal/provider/source_intercom_resource.go index 10f3adc16..41a8570b7 100755 --- a/internal/provider/source_intercom_resource.go +++ b/internal/provider/source_intercom_resource.go @@ -59,6 +59,14 @@ func (r *SourceIntercomResource) Schema(ctx context.Context, req resource.Schema Required: true, Description: `Access token for making authenticated requests. See the Intercom docs for more information.`, }, + "client_id": schema.StringAttribute{ + Optional: true, + Description: `Client Id for your Intercom application.`, + }, + "client_secret": schema.StringAttribute{ + Optional: true, + Description: `Client Secret for your Intercom application.`, + }, "source_type": schema.StringAttribute{ Required: true, Validators: []validator.String{ diff --git a/internal/provider/source_intercom_resource_sdk.go b/internal/provider/source_intercom_resource_sdk.go index 22b6881b8..294ac16be 100755 --- a/internal/provider/source_intercom_resource_sdk.go +++ b/internal/provider/source_intercom_resource_sdk.go @@ -10,12 +10,26 @@ import ( func (r *SourceIntercomResourceModel) ToCreateSDKType() *shared.SourceIntercomCreateRequest { accessToken := r.Configuration.AccessToken.ValueString() + clientID := new(string) + if !r.Configuration.ClientID.IsUnknown() && !r.Configuration.ClientID.IsNull() { + *clientID = r.Configuration.ClientID.ValueString() + } else { + clientID = nil + } + clientSecret := new(string) + if !r.Configuration.ClientSecret.IsUnknown() && !r.Configuration.ClientSecret.IsNull() { + *clientSecret = r.Configuration.ClientSecret.ValueString() + } else { + clientSecret = nil + } sourceType := shared.SourceIntercomIntercom(r.Configuration.SourceType.ValueString()) startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) configuration := shared.SourceIntercom{ - AccessToken: accessToken, - SourceType: sourceType, - StartDate: startDate, + AccessToken: accessToken, + ClientID: clientID, + ClientSecret: clientSecret, + SourceType: sourceType, + StartDate: startDate, } name := r.Name.ValueString() secretID := new(string) @@ -41,10 +55,24 @@ func (r *SourceIntercomResourceModel) ToGetSDKType() *shared.SourceIntercomCreat func (r *SourceIntercomResourceModel) ToUpdateSDKType() *shared.SourceIntercomPutRequest { accessToken := r.Configuration.AccessToken.ValueString() + clientID := new(string) + if !r.Configuration.ClientID.IsUnknown() && !r.Configuration.ClientID.IsNull() { + *clientID = r.Configuration.ClientID.ValueString() + } else { + clientID = nil + } + 
clientSecret := new(string) + if !r.Configuration.ClientSecret.IsUnknown() && !r.Configuration.ClientSecret.IsNull() { + *clientSecret = r.Configuration.ClientSecret.ValueString() + } else { + clientSecret = nil + } startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) configuration := shared.SourceIntercomUpdate{ - AccessToken: accessToken, - StartDate: startDate, + AccessToken: accessToken, + ClientID: clientID, + ClientSecret: clientSecret, + StartDate: startDate, } name := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() diff --git a/internal/provider/source_lemlist_data_source.go b/internal/provider/source_lemlist_data_source.go index 0f44638a7..f2c6dce4b 100755 --- a/internal/provider/source_lemlist_data_source.go +++ b/internal/provider/source_lemlist_data_source.go @@ -54,7 +54,7 @@ func (r *SourceLemlistDataSource) Schema(ctx context.Context, req datasource.Sch Attributes: map[string]schema.Attribute{ "api_key": schema.StringAttribute{ Computed: true, - Description: `Lemlist API key.`, + Description: `Lemlist API key,`, }, "source_type": schema.StringAttribute{ Computed: true, diff --git a/internal/provider/source_lemlist_resource.go b/internal/provider/source_lemlist_resource.go index bb9dafe4e..6702061dd 100755 --- a/internal/provider/source_lemlist_resource.go +++ b/internal/provider/source_lemlist_resource.go @@ -56,7 +56,7 @@ func (r *SourceLemlistResource) Schema(ctx context.Context, req resource.SchemaR Attributes: map[string]schema.Attribute{ "api_key": schema.StringAttribute{ Required: true, - Description: `Lemlist API key.`, + Description: `Lemlist API key,`, }, "source_type": schema.StringAttribute{ Required: true, diff --git a/internal/provider/source_linkedinads_data_source.go b/internal/provider/source_linkedinads_data_source.go index 162c30145..ec25069f9 100755 --- a/internal/provider/source_linkedinads_data_source.go +++ b/internal/provider/source_linkedinads_data_source.go @@ -56,7 +56,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource "account_ids": schema.ListAttribute{ Computed: true, ElementType: types.Int64Type, - Description: `Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info.`, + Description: `Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. 
See the LinkedIn docs to locate these IDs.`, }, "ad_analytics_reports": schema.ListNestedAttribute{ Computed: true, @@ -64,7 +64,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource Attributes: map[string]schema.Attribute{ "name": schema.StringAttribute{ Computed: true, - Description: `The name for the report`, + Description: `The name for the custom report.`, }, "pivot_by": schema.StringAttribute{ Computed: true, @@ -94,7 +94,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource ), }, MarkdownDescription: `must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"]` + "\n" + - `Select value from list to pivot by`, + `Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.`, }, "time_granularity": schema.StringAttribute{ Computed: true, @@ -107,11 +107,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource ), }, MarkdownDescription: `must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"]` + "\n" + - `Set time granularity for report: ` + "\n" + - `ALL - Results grouped into a single result across the entire time range of the report.` + "\n" + - `DAILY - Results grouped by day.` + "\n" + - `MONTHLY - Results grouped by month.` + "\n" + - `YEARLY - Results grouped by year.`, + `Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.`, }, }, }, @@ -124,7 +120,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource Attributes: map[string]schema.Attribute{ "access_token": schema.StringAttribute{ Computed: true, - Description: `The token value generated using the authentication code. See the docs to obtain yours.`, + Description: `The access token generated for your developer application. Refer to our documentation for more information.`, }, "auth_method": schema.StringAttribute{ Computed: true, @@ -151,15 +147,15 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource }, "client_id": schema.StringAttribute{ Computed: true, - Description: `The client ID of the LinkedIn Ads developer application.`, + Description: `The client ID of your developer application. Refer to our documentation for more information.`, }, "client_secret": schema.StringAttribute{ Computed: true, - Description: `The client secret the LinkedIn Ads developer application.`, + Description: `The client secret of your developer application. Refer to our documentation for more information.`, }, "refresh_token": schema.StringAttribute{ Computed: true, - Description: `The key to refresh the expired access token.`, + Description: `The key to refresh the expired access token. Refer to our documentation for more information.`, }, }, }, @@ -168,7 +164,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource Attributes: map[string]schema.Attribute{ "access_token": schema.StringAttribute{ Computed: true, - Description: `The token value generated using the authentication code. See the docs to obtain yours.`, + Description: `The access token generated for your developer application. Refer to our documentation for more information.`, }, "auth_method": schema.StringAttribute{ Computed: true, @@ -195,15 +191,15 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource }, "client_id": schema.StringAttribute{ Computed: true, - Description: `The client ID of the LinkedIn Ads developer application.`, + Description: `The client ID of your developer application. Refer to our documentation for more information.`, }, "client_secret": schema.StringAttribute{ Computed: true, - Description: `The client secret the LinkedIn Ads developer application.`, + Description: `The client secret of your developer application. Refer to our documentation for more information.`, }, "refresh_token": schema.StringAttribute{ Computed: true, - Description: `The key to refresh the expired access token.`, + Description: `The key to refresh the expired access token. Refer to our documentation for more information.`, }, }, }, @@ -226,7 +222,7 @@ func (r *SourceLinkedinAdsDataSource) Schema(ctx context.Context, req datasource Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format 2020-09-17. Any data before this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. 
Any data before this date will not be replicated.`, }, }, }, diff --git a/internal/provider/source_linkedinads_resource.go b/internal/provider/source_linkedinads_resource.go index b3244d789..ca24efad9 100755 --- a/internal/provider/source_linkedinads_resource.go +++ b/internal/provider/source_linkedinads_resource.go @@ -58,7 +58,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch "account_ids": schema.ListAttribute{ Optional: true, ElementType: types.Int64Type, - Description: `Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info.`, + Description: `Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs.`, }, "ad_analytics_reports": schema.ListNestedAttribute{ Optional: true, @@ -66,7 +66,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch Attributes: map[string]schema.Attribute{ "name": schema.StringAttribute{ Required: true, - Description: `The name for the report`, + Description: `The name for the custom report.`, }, "pivot_by": schema.StringAttribute{ Required: true, @@ -96,7 +96,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch ), }, MarkdownDescription: `must be one of ["COMPANY", "ACCOUNT", "SHARE", "CAMPAIGN", "CREATIVE", "CAMPAIGN_GROUP", "CONVERSION", "CONVERSATION_NODE", "CONVERSATION_NODE_OPTION_INDEX", "SERVING_LOCATION", "CARD_INDEX", "MEMBER_COMPANY_SIZE", "MEMBER_INDUSTRY", "MEMBER_SENIORITY", "MEMBER_JOB_TITLE ", "MEMBER_JOB_FUNCTION ", "MEMBER_COUNTRY_V2 ", "MEMBER_REGION_V2", "MEMBER_COMPANY", "PLACEMENT_NAME", "IMPRESSION_DEVICE_TYPE"]` + "\n" + - `Select value from list to pivot by`, + `Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives.`, }, "time_granularity": schema.StringAttribute{ Required: true, @@ -109,11 +109,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch ), }, MarkdownDescription: `must be one of ["ALL", "DAILY", "MONTHLY", "YEARLY"]` + "\n" + - `Set time granularity for report: ` + "\n" + - `ALL - Results grouped into a single result across the entire time range of the report.` + "\n" + - `DAILY - Results grouped by day.` + "\n" + - `MONTHLY - Results grouped by month.` + "\n" + - `YEARLY - Results grouped by year.`, + `Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods.`, }, }, }, @@ -126,7 +122,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch Attributes: map[string]schema.Attribute{ "access_token": schema.StringAttribute{ Required: true, - Description: `The token value generated using the authentication code. See the docs to obtain yours.`, + Description: `The access token generated for your developer application. Refer to our documentation for more information.`, }, "auth_method": schema.StringAttribute{ Optional: true, @@ -153,15 +149,15 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch }, "client_id": schema.StringAttribute{ Required: true, - Description: `The client ID of the LinkedIn Ads developer application.`, + Description: `The client ID of your developer application. Refer to our documentation for more information.`, }, "client_secret": schema.StringAttribute{ Required: true, - Description: `The client secret the LinkedIn Ads developer application.`, + Description: `The client secret of your developer application. Refer to our documentation for more information.`, }, "refresh_token": schema.StringAttribute{ Required: true, - Description: `The key to refresh the expired access token.`, + Description: `The key to refresh the expired access token. Refer to our documentation for more information.`, }, }, }, @@ -170,7 +166,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch Attributes: map[string]schema.Attribute{ "access_token": schema.StringAttribute{ Required: true, - Description: `The token value generated using the authentication code. See the docs to obtain yours.`, + Description: `The access token generated for your developer application. Refer to our documentation for more information.`, }, "auth_method": schema.StringAttribute{ Optional: true, @@ -197,15 +193,15 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch }, "client_id": schema.StringAttribute{ Required: true, - Description: `The client ID of the LinkedIn Ads developer application.`, + Description: `The client ID of your developer application. Refer to our documentation for more information.`, }, "client_secret": schema.StringAttribute{ Required: true, - Description: `The client secret the LinkedIn Ads developer application.`, + Description: `The client secret of your developer application. Refer to our documentation for more information.`, }, "refresh_token": schema.StringAttribute{ Required: true, - Description: `The key to refresh the expired access token.`, + Description: `The key to refresh the expired access token. Refer to our documentation for more information.`, }, }, }, @@ -228,7 +224,7 @@ func (r *SourceLinkedinAdsResource) Schema(ctx context.Context, req resource.Sch Validators: []validator.String{ validators.IsValidDate(), }, - Description: `UTC date in the format 2020-09-17. Any data before this date will not be replicated.`, + Description: `UTC date in the format YYYY-MM-DD. 
Any data before this date will not be replicated.`, }, }, }, diff --git a/internal/provider/source_linkedinads_resource_sdk.go b/internal/provider/source_linkedinads_resource_sdk.go index 31845e412..dcfecef45 100755 --- a/internal/provider/source_linkedinads_resource_sdk.go +++ b/internal/provider/source_linkedinads_resource_sdk.go @@ -16,7 +16,7 @@ func (r *SourceLinkedinAdsResourceModel) ToCreateSDKType() *shared.SourceLinkedi var adAnalyticsReports []shared.SourceLinkedinAdsAdAnalyticsReportConfiguration = nil for _, adAnalyticsReportsItem := range r.Configuration.AdAnalyticsReports { name := adAnalyticsReportsItem.Name.ValueString() - pivotBy := shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy(adAnalyticsReportsItem.PivotBy.ValueString()) + pivotBy := shared.SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory(adAnalyticsReportsItem.PivotBy.ValueString()) timeGranularity := shared.SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity(adAnalyticsReportsItem.TimeGranularity.ValueString()) adAnalyticsReports = append(adAnalyticsReports, shared.SourceLinkedinAdsAdAnalyticsReportConfiguration{ Name: name, @@ -108,7 +108,7 @@ func (r *SourceLinkedinAdsResourceModel) ToUpdateSDKType() *shared.SourceLinkedi var adAnalyticsReports []shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration = nil for _, adAnalyticsReportsItem := range r.Configuration.AdAnalyticsReports { name := adAnalyticsReportsItem.Name.ValueString() - pivotBy := shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy(adAnalyticsReportsItem.PivotBy.ValueString()) + pivotBy := shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory(adAnalyticsReportsItem.PivotBy.ValueString()) timeGranularity := shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity(adAnalyticsReportsItem.TimeGranularity.ValueString()) adAnalyticsReports = append(adAnalyticsReports, shared.SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration{ Name: name, diff --git a/internal/provider/source_mssql_data_source.go b/internal/provider/source_mssql_data_source.go index 3c04acbac..fcf445595 100755 --- a/internal/provider/source_mssql_data_source.go +++ b/internal/provider/source_mssql_data_source.go @@ -76,7 +76,7 @@ func (r *SourceMssqlDataSource) Schema(ctx context.Context, req datasource.Schem "replication_method": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ - "source_mssql_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_mssql_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "data_to_sync": schema.StringAttribute{ @@ -115,9 +115,9 @@ func (r *SourceMssqlDataSource) Schema(ctx context.Context, req datasource.Schem `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`, }, }, - Description: `CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. 
This must be enabled on your database.`, }, - "source_mssql_replication_method_standard": schema.SingleNestedAttribute{ + "source_mssql_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -130,9 +130,9 @@ func (r *SourceMssqlDataSource) Schema(ctx context.Context, req datasource.Schem Description: `must be one of ["STANDARD"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, - "source_mssql_update_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_mssql_update_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "data_to_sync": schema.StringAttribute{ @@ -171,9 +171,9 @@ func (r *SourceMssqlDataSource) Schema(ctx context.Context, req datasource.Schem `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`, }, }, - Description: `CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.`, }, - "source_mssql_update_replication_method_standard": schema.SingleNestedAttribute{ + "source_mssql_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -186,13 +186,13 @@ func (r *SourceMssqlDataSource) Schema(ctx context.Context, req datasource.Schem Description: `must be one of ["STANDARD"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, }, Validators: []validator.Object{ validators.ExactlyOneChild(), }, - Description: `The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. 
This needs to be configured on the source database itself.`, + Description: `Configures how data is extracted from the database.`, }, "schemas": schema.ListAttribute{ Computed: true, diff --git a/internal/provider/source_mssql_resource.go b/internal/provider/source_mssql_resource.go index 4834b5895..81820013d 100755 --- a/internal/provider/source_mssql_resource.go +++ b/internal/provider/source_mssql_resource.go @@ -78,7 +78,7 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq "replication_method": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ - "source_mssql_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_mssql_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "data_to_sync": schema.StringAttribute{ @@ -117,9 +117,9 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`, }, }, - Description: `CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database.`, }, - "source_mssql_replication_method_standard": schema.SingleNestedAttribute{ + "source_mssql_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -132,9 +132,9 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq Description: `must be one of ["STANDARD"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, - "source_mssql_update_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_mssql_update_update_method_read_changes_using_change_data_capture_cdc": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "data_to_sync": schema.StringAttribute{ @@ -173,9 +173,9 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq `Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database.`, }, }, - Description: `CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. 
This must be enabled on your database.`, }, - "source_mssql_update_replication_method_standard": schema.SingleNestedAttribute{ + "source_mssql_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -188,13 +188,13 @@ func (r *SourceMssqlResource) Schema(ctx context.Context, req resource.SchemaReq Description: `must be one of ["STANDARD"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, }, Validators: []validator.Object{ validators.ExactlyOneChild(), }, - Description: `The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself.`, + Description: `Configures how data is extracted from the database.`, }, "schemas": schema.ListAttribute{ Optional: true, diff --git a/internal/provider/source_mssql_resource_sdk.go b/internal/provider/source_mssql_resource_sdk.go index b05c4a0d4..4dfb51698 100755 --- a/internal/provider/source_mssql_resource_sdk.go +++ b/internal/provider/source_mssql_resource_sdk.go @@ -23,51 +23,51 @@ func (r *SourceMssqlResourceModel) ToCreateSDKType() *shared.SourceMssqlCreateRe password = nil } port := r.Configuration.Port.ValueInt64() - var replicationMethod *shared.SourceMssqlReplicationMethod + var replicationMethod *shared.SourceMssqlUpdateMethod if r.Configuration.ReplicationMethod != nil { - var sourceMssqlReplicationMethodStandard *shared.SourceMssqlReplicationMethodStandard - if r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodStandard != nil { - method := shared.SourceMssqlReplicationMethodStandardMethod(r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodStandard.Method.ValueString()) - sourceMssqlReplicationMethodStandard = &shared.SourceMssqlReplicationMethodStandard{ - Method: method, - } - } - if sourceMssqlReplicationMethodStandard != nil { - replicationMethod = &shared.SourceMssqlReplicationMethod{ - SourceMssqlReplicationMethodStandard: sourceMssqlReplicationMethodStandard, - } - } - var sourceMssqlReplicationMethodLogicalReplicationCDC *shared.SourceMssqlReplicationMethodLogicalReplicationCDC - if r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC != nil { - dataToSync := new(shared.SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync) - if !r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.DataToSync.IsNull() { - *dataToSync = shared.SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync(r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.DataToSync.ValueString()) + var sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC *shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC + if r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil { + dataToSync := new(shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) + if 
!r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsNull() { + *dataToSync = shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.ValueString()) } else { dataToSync = nil } initialWaitingSeconds := new(int64) - if !r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsNull() { - *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.ValueInt64() + if !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsNull() { + *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.ValueInt64() } else { initialWaitingSeconds = nil } - method1 := shared.SourceMssqlReplicationMethodLogicalReplicationCDCMethod(r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.Method.ValueString()) - snapshotIsolation := new(shared.SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel) - if !r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.SnapshotIsolation.IsNull() { - *snapshotIsolation = shared.SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.SourceMssqlReplicationMethodLogicalReplicationCDC.SnapshotIsolation.ValueString()) + method := shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.Method.ValueString()) + snapshotIsolation := new(shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) + if !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsNull() { + *snapshotIsolation = shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.ValueString()) } else { snapshotIsolation = nil } - sourceMssqlReplicationMethodLogicalReplicationCDC = &shared.SourceMssqlReplicationMethodLogicalReplicationCDC{ + sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC = &shared.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC{ DataToSync: dataToSync, InitialWaitingSeconds: initialWaitingSeconds, - Method: method1, + Method: method, SnapshotIsolation: snapshotIsolation, } } - if sourceMssqlReplicationMethodLogicalReplicationCDC != nil { - replicationMethod = &shared.SourceMssqlReplicationMethod{ - 
SourceMssqlReplicationMethodLogicalReplicationCDC: sourceMssqlReplicationMethodLogicalReplicationCDC, + if sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil { + replicationMethod = &shared.SourceMssqlUpdateMethod{ + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC: sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC, + } + } + var sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor *shared.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor + if r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor != nil { + method1 := shared.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString()) + sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor{ + Method: method1, + } + } + if sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor != nil { + replicationMethod = &shared.SourceMssqlUpdateMethod{ + SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor: sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor, } } } @@ -217,51 +217,51 @@ func (r *SourceMssqlResourceModel) ToUpdateSDKType() *shared.SourceMssqlPutReque password = nil } port := r.Configuration.Port.ValueInt64() - var replicationMethod *shared.SourceMssqlUpdateReplicationMethod + var replicationMethod *shared.SourceMssqlUpdateUpdateMethod if r.Configuration.ReplicationMethod != nil { - var sourceMssqlUpdateReplicationMethodStandard *shared.SourceMssqlUpdateReplicationMethodStandard - if r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodStandard != nil { - method := shared.SourceMssqlUpdateReplicationMethodStandardMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodStandard.Method.ValueString()) - sourceMssqlUpdateReplicationMethodStandard = &shared.SourceMssqlUpdateReplicationMethodStandard{ - Method: method, - } - } - if sourceMssqlUpdateReplicationMethodStandard != nil { - replicationMethod = &shared.SourceMssqlUpdateReplicationMethod{ - SourceMssqlUpdateReplicationMethodStandard: sourceMssqlUpdateReplicationMethodStandard, - } - } - var sourceMssqlUpdateReplicationMethodLogicalReplicationCDC *shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC - if r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC != nil { - dataToSync := new(shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync) - if !r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.DataToSync.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.DataToSync.IsNull() { - *dataToSync = shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync(r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.DataToSync.ValueString()) + var sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC *shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC + if r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil { + dataToSync := new(shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) + if !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsUnknown() && 
!r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.IsNull() { + *dataToSync = shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.DataToSync.ValueString()) } else { dataToSync = nil } initialWaitingSeconds := new(int64) - if !r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsNull() { - *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.ValueInt64() + if !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.IsNull() { + *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.InitialWaitingSeconds.ValueInt64() } else { initialWaitingSeconds = nil } - method1 := shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.Method.ValueString()) - snapshotIsolation := new(shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel) - if !r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.SnapshotIsolation.IsNull() { - *snapshotIsolation = shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC.SnapshotIsolation.ValueString()) + method := shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.Method.ValueString()) + snapshotIsolation := new(shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) + if !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsUnknown() && !r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.IsNull() { + *snapshotIsolation = shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC.SnapshotIsolation.ValueString()) } else { snapshotIsolation = nil } - sourceMssqlUpdateReplicationMethodLogicalReplicationCDC = &shared.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC{ + sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC = &shared.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC{ DataToSync: dataToSync, InitialWaitingSeconds: initialWaitingSeconds, - Method: method1, + Method: method, SnapshotIsolation: snapshotIsolation, } } - if sourceMssqlUpdateReplicationMethodLogicalReplicationCDC != nil { - replicationMethod = 
&shared.SourceMssqlUpdateReplicationMethod{ - SourceMssqlUpdateReplicationMethodLogicalReplicationCDC: sourceMssqlUpdateReplicationMethodLogicalReplicationCDC, + if sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil { + replicationMethod = &shared.SourceMssqlUpdateUpdateMethod{ + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC: sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC, + } + } + var sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *shared.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor + if r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { + method1 := shared.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString()) + sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor{ + Method: method1, + } + } + if sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { + replicationMethod = &shared.SourceMssqlUpdateUpdateMethod{ + SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor: sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor, } } } diff --git a/internal/provider/source_openweather_data_source.go b/internal/provider/source_openweather_data_source.go deleted file mode 100755 index ec2d036d4..000000000 --- a/internal/provider/source_openweather_data_source.go +++ /dev/null @@ -1,231 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import ( - "airbyte/internal/sdk" - "airbyte/internal/sdk/pkg/models/operations" - "context" - "fmt" - - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -// Ensure provider defined types fully satisfy framework interfaces. -var _ datasource.DataSource = &SourceOpenweatherDataSource{} -var _ datasource.DataSourceWithConfigure = &SourceOpenweatherDataSource{} - -func NewSourceOpenweatherDataSource() datasource.DataSource { - return &SourceOpenweatherDataSource{} -} - -// SourceOpenweatherDataSource is the data source implementation. -type SourceOpenweatherDataSource struct { - client *sdk.SDK -} - -// SourceOpenweatherDataSourceModel describes the data model. -type SourceOpenweatherDataSourceModel struct { - Configuration SourceOpenweather `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - WorkspaceID types.String `tfsdk:"workspace_id"` -} - -// Metadata returns the data source type name. -func (r *SourceOpenweatherDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_source_openweather" -} - -// Schema defines the schema for the data source. 
-func (r *SourceOpenweatherDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "SourceOpenweather DataSource", - - Attributes: map[string]schema.Attribute{ - "configuration": schema.SingleNestedAttribute{ - Computed: true, - Attributes: map[string]schema.Attribute{ - "appid": schema.StringAttribute{ - Computed: true, - Description: `Your OpenWeather API Key. See here. The key is case sensitive.`, - }, - "lang": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "af", - "al", - "ar", - "az", - "bg", - "ca", - "cz", - "da", - "de", - "el", - "en", - "eu", - "fa", - "fi", - "fr", - "gl", - "he", - "hi", - "hr", - "hu", - "id", - "it", - "ja", - "kr", - "la", - "lt", - "mk", - "no", - "nl", - "pl", - "pt", - "pt_br", - "ro", - "ru", - "sv", - "se", - "sk", - "sl", - "sp", - "es", - "sr", - "th", - "tr", - "ua", - "uk", - "vi", - "zh_cn", - "zh_tw", - "zu", - ), - }, - MarkdownDescription: `must be one of ["af", "al", "ar", "az", "bg", "ca", "cz", "da", "de", "el", "en", "eu", "fa", "fi", "fr", "gl", "he", "hi", "hr", "hu", "id", "it", "ja", "kr", "la", "lt", "mk", "no", "nl", "pl", "pt", "pt_br", "ro", "ru", "sv", "se", "sk", "sl", "sp", "es", "sr", "th", "tr", "ua", "uk", "vi", "zh_cn", "zh_tw", "zu"]` + "\n" + - `You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages.`, - }, - "lat": schema.StringAttribute{ - Computed: true, - Description: `Latitude for which you want to get weather condition from. (min -90, max 90)`, - }, - "lon": schema.StringAttribute{ - Computed: true, - Description: `Longitude for which you want to get weather condition from. (min -180, max 180)`, - }, - "source_type": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "openweather", - ), - }, - Description: `must be one of ["openweather"]`, - }, - "units": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "standard", - "metric", - "imperial", - ), - }, - MarkdownDescription: `must be one of ["standard", "metric", "imperial"]` + "\n" + - `Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default.`, - }, - }, - }, - "name": schema.StringAttribute{ - Computed: true, - }, - "secret_id": schema.StringAttribute{ - Optional: true, - Description: `Optional secretID obtained through the public API OAuth redirect flow.`, - }, - "source_id": schema.StringAttribute{ - Required: true, - }, - "workspace_id": schema.StringAttribute{ - Computed: true, - }, - }, - } -} - -func (r *SourceOpenweatherDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - client, ok := req.ProviderData.(*sdk.SDK) - - if !ok { - resp.Diagnostics.AddError( - "Unexpected DataSource Configure Type", - fmt.Sprintf("Expected *sdk.SDK, got: %T. 
Please report this issue to the provider developers.", req.ProviderData), - ) - - return - } - - r.client = client -} - -func (r *SourceOpenweatherDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data *SourceOpenweatherDataSourceModel - var item types.Object - - resp.Diagnostics.Append(req.Config.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.GetSourceOpenweatherRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.GetSourceOpenweather(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromGetResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} diff --git a/internal/provider/source_openweather_resource.go b/internal/provider/source_openweather_resource.go deleted file mode 100755 index 3d26b5850..000000000 --- a/internal/provider/source_openweather_resource.go +++ /dev/null @@ -1,397 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import ( - "airbyte/internal/sdk" - "context" - "fmt" - - speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier" - "airbyte/internal/sdk/pkg/models/operations" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -// Ensure provider defined types fully satisfy framework interfaces. -var _ resource.Resource = &SourceOpenweatherResource{} -var _ resource.ResourceWithImportState = &SourceOpenweatherResource{} - -func NewSourceOpenweatherResource() resource.Resource { - return &SourceOpenweatherResource{} -} - -// SourceOpenweatherResource defines the resource implementation. -type SourceOpenweatherResource struct { - client *sdk.SDK -} - -// SourceOpenweatherResourceModel describes the resource data model. 
-type SourceOpenweatherResourceModel struct { - Configuration SourceOpenweather `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - SourceType types.String `tfsdk:"source_type"` - WorkspaceID types.String `tfsdk:"workspace_id"` -} - -func (r *SourceOpenweatherResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_source_openweather" -} - -func (r *SourceOpenweatherResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "SourceOpenweather Resource", - - Attributes: map[string]schema.Attribute{ - "configuration": schema.SingleNestedAttribute{ - Required: true, - Attributes: map[string]schema.Attribute{ - "appid": schema.StringAttribute{ - Required: true, - Description: `Your OpenWeather API Key. See here. The key is case sensitive.`, - }, - "lang": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "af", - "al", - "ar", - "az", - "bg", - "ca", - "cz", - "da", - "de", - "el", - "en", - "eu", - "fa", - "fi", - "fr", - "gl", - "he", - "hi", - "hr", - "hu", - "id", - "it", - "ja", - "kr", - "la", - "lt", - "mk", - "no", - "nl", - "pl", - "pt", - "pt_br", - "ro", - "ru", - "sv", - "se", - "sk", - "sl", - "sp", - "es", - "sr", - "th", - "tr", - "ua", - "uk", - "vi", - "zh_cn", - "zh_tw", - "zu", - ), - }, - MarkdownDescription: `must be one of ["af", "al", "ar", "az", "bg", "ca", "cz", "da", "de", "el", "en", "eu", "fa", "fi", "fr", "gl", "he", "hi", "hr", "hu", "id", "it", "ja", "kr", "la", "lt", "mk", "no", "nl", "pl", "pt", "pt_br", "ro", "ru", "sv", "se", "sk", "sl", "sp", "es", "sr", "th", "tr", "ua", "uk", "vi", "zh_cn", "zh_tw", "zu"]` + "\n" + - `You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages.`, - }, - "lat": schema.StringAttribute{ - Required: true, - Description: `Latitude for which you want to get weather condition from. (min -90, max 90)`, - }, - "lon": schema.StringAttribute{ - Required: true, - Description: `Longitude for which you want to get weather condition from. (min -180, max 180)`, - }, - "source_type": schema.StringAttribute{ - Required: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "openweather", - ), - }, - Description: `must be one of ["openweather"]`, - }, - "units": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "standard", - "metric", - "imperial", - ), - }, - MarkdownDescription: `must be one of ["standard", "metric", "imperial"]` + "\n" + - `Units of measurement. standard, metric and imperial units are available. 
If you do not use the units parameter, standard units will be applied by default.`, - }, - }, - }, - "name": schema.StringAttribute{ - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - Required: true, - }, - "secret_id": schema.StringAttribute{ - Optional: true, - Description: `Optional secretID obtained through the public API OAuth redirect flow.`, - }, - "source_id": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - }, - "source_type": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - }, - "workspace_id": schema.StringAttribute{ - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - Required: true, - }, - }, - } -} - -func (r *SourceOpenweatherResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - client, ok := req.ProviderData.(*sdk.SDK) - - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - - return - } - - r.client = client -} - -func (r *SourceOpenweatherResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data *SourceOpenweatherResourceModel - var item types.Object - - resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - request := *data.ToCreateSDKType() - res, err := r.client.Sources.CreateSourceOpenweather(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromCreateResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourceOpenweatherResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data *SourceOpenweatherResourceModel - var item types.Object - - resp.Diagnostics.Append(req.State.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) 
- - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.GetSourceOpenweatherRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.GetSourceOpenweather(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromGetResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourceOpenweatherResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data *SourceOpenweatherResourceModel - merge(ctx, req, resp, &data) - if resp.Diagnostics.HasError() { - return - } - - sourceOpenweatherPutRequest := data.ToUpdateSDKType() - sourceID := data.SourceID.ValueString() - request := operations.PutSourceOpenweatherRequest{ - SourceOpenweatherPutRequest: sourceOpenweatherPutRequest, - SourceID: sourceID, - } - res, err := r.client.Sources.PutSourceOpenweather(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - sourceId1 := data.SourceID.ValueString() - getRequest := operations.GetSourceOpenweatherRequest{ - SourceID: sourceId1, - } - getResponse, err := r.client.Sources.GetSourceOpenweather(ctx, getRequest) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if getResponse == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse)) - return - } - if getResponse.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse)) - return - } - if getResponse.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse)) - return - } - data.RefreshFromGetResponse(getResponse.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) 
-} - -func (r *SourceOpenweatherResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data *SourceOpenweatherResourceModel - var item types.Object - - resp.Diagnostics.Append(req.State.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.DeleteSourceOpenweatherRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.DeleteSourceOpenweather(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - -} - -func (r *SourceOpenweatherResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp) -} diff --git a/internal/provider/source_openweather_resource_sdk.go b/internal/provider/source_openweather_resource_sdk.go deleted file mode 100755 index 18081a9e0..000000000 --- a/internal/provider/source_openweather_resource_sdk.go +++ /dev/null @@ -1,104 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "github.com/hashicorp/terraform-plugin-framework/types" -) - -func (r *SourceOpenweatherResourceModel) ToCreateSDKType() *shared.SourceOpenweatherCreateRequest { - appid := r.Configuration.Appid.ValueString() - lang := new(shared.SourceOpenweatherLanguage) - if !r.Configuration.Lang.IsUnknown() && !r.Configuration.Lang.IsNull() { - *lang = shared.SourceOpenweatherLanguage(r.Configuration.Lang.ValueString()) - } else { - lang = nil - } - lat := r.Configuration.Lat.ValueString() - lon := r.Configuration.Lon.ValueString() - sourceType := shared.SourceOpenweatherOpenweather(r.Configuration.SourceType.ValueString()) - units := new(shared.SourceOpenweatherUnits) - if !r.Configuration.Units.IsUnknown() && !r.Configuration.Units.IsNull() { - *units = shared.SourceOpenweatherUnits(r.Configuration.Units.ValueString()) - } else { - units = nil - } - configuration := shared.SourceOpenweather{ - Appid: appid, - Lang: lang, - Lat: lat, - Lon: lon, - SourceType: sourceType, - Units: units, - } - name := r.Name.ValueString() - secretID := new(string) - if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() { - *secretID = r.SecretID.ValueString() - } else { - secretID = nil - } - workspaceID := r.WorkspaceID.ValueString() - out := shared.SourceOpenweatherCreateRequest{ - Configuration: configuration, - Name: name, - SecretID: secretID, - WorkspaceID: workspaceID, - } - return &out -} - -func (r *SourceOpenweatherResourceModel) ToGetSDKType() *shared.SourceOpenweatherCreateRequest { - out := r.ToCreateSDKType() - return out -} - -func (r *SourceOpenweatherResourceModel) ToUpdateSDKType() *shared.SourceOpenweatherPutRequest { - appid := r.Configuration.Appid.ValueString() - lang := new(shared.SourceOpenweatherUpdateLanguage) - if !r.Configuration.Lang.IsUnknown() && !r.Configuration.Lang.IsNull() { - *lang = shared.SourceOpenweatherUpdateLanguage(r.Configuration.Lang.ValueString()) - } else { - lang = nil - } - lat := r.Configuration.Lat.ValueString() - lon := r.Configuration.Lon.ValueString() - units := new(shared.SourceOpenweatherUpdateUnits) - if !r.Configuration.Units.IsUnknown() && !r.Configuration.Units.IsNull() { - *units = shared.SourceOpenweatherUpdateUnits(r.Configuration.Units.ValueString()) - } else { - units = nil - } - configuration := shared.SourceOpenweatherUpdate{ - Appid: appid, - Lang: lang, - Lat: lat, - Lon: lon, - Units: units, - } - name := r.Name.ValueString() - workspaceID := r.WorkspaceID.ValueString() - out := shared.SourceOpenweatherPutRequest{ - Configuration: configuration, - Name: name, - WorkspaceID: workspaceID, - } - return &out -} - -func (r *SourceOpenweatherResourceModel) ToDeleteSDKType() *shared.SourceOpenweatherCreateRequest { - out := r.ToCreateSDKType() - return out -} - -func (r *SourceOpenweatherResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) { - r.Name = types.StringValue(resp.Name) - r.SourceID = types.StringValue(resp.SourceID) - r.SourceType = types.StringValue(resp.SourceType) - r.WorkspaceID = types.StringValue(resp.WorkspaceID) -} - -func (r *SourceOpenweatherResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) { - r.RefreshFromGetResponse(resp) -} diff --git a/internal/provider/source_postgres_data_source.go b/internal/provider/source_postgres_data_source.go index 367bcd617..33d3caf26 100755 --- a/internal/provider/source_postgres_data_source.go +++ b/internal/provider/source_postgres_data_source.go @@ -76,7 +76,22 @@ func (r 
*SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc "replication_method": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ - "source_postgres_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_postgres_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "method": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Xmin", + ), + }, + Description: `must be one of ["Xmin"]`, + }, + }, + Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.`, + }, + "source_postgres_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "initial_waiting_seconds": schema.Int64Attribute{ @@ -133,9 +148,9 @@ func (r *SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc Description: `Parsed as JSON.`, }, }, - Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`, }, - "source_postgres_replication_method_standard": schema.SingleNestedAttribute{ + "source_postgres_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -148,9 +163,9 @@ func (r *SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc Description: `must be one of ["Standard"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, - "source_postgres_replication_method_standard_xmin": schema.SingleNestedAttribute{ + "source_postgres_update_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -163,9 +178,9 @@ func (r *SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc Description: `must be one of ["Xmin"]`, }, }, - Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. 
Only recommended for tables up to 500GB.`, }, - "source_postgres_update_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "initial_waiting_seconds": schema.Int64Attribute{ @@ -222,9 +237,9 @@ func (r *SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc Description: `Parsed as JSON.`, }, }, - Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`, }, - "source_postgres_update_replication_method_standard": schema.SingleNestedAttribute{ + "source_postgres_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -237,28 +252,13 @@ func (r *SourcePostgresDataSource) Schema(ctx context.Context, req datasource.Sc Description: `must be one of ["Standard"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, - }, - "source_postgres_update_replication_method_standard_xmin": schema.SingleNestedAttribute{ - Computed: true, - Attributes: map[string]schema.Attribute{ - "method": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "Xmin", - ), - }, - Description: `must be one of ["Xmin"]`, - }, - }, - Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, }, Validators: []validator.Object{ validators.ExactlyOneChild(), }, - Description: `Replication method for extracting data from the database.`, + Description: `Configures how data is extracted from the database.`, }, "schemas": schema.ListAttribute{ Computed: true, diff --git a/internal/provider/source_postgres_resource.go b/internal/provider/source_postgres_resource.go index d493191fe..cd6641c86 100755 --- a/internal/provider/source_postgres_resource.go +++ b/internal/provider/source_postgres_resource.go @@ -78,7 +78,22 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema "replication_method": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ - "source_postgres_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_postgres_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "method": schema.StringAttribute{ + Required: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Xmin", + ), + }, + Description: `must be one of ["Xmin"]`, + }, + }, + Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. 
Only recommended for tables up to 500GB.`, + }, + "source_postgres_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "initial_waiting_seconds": schema.Int64Attribute{ @@ -135,9 +150,9 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema Description: `Parsed as JSON.`, }, }, - Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size.`, }, - "source_postgres_replication_method_standard": schema.SingleNestedAttribute{ + "source_postgres_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -150,9 +165,9 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema Description: `must be one of ["Standard"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, - "source_postgres_replication_method_standard_xmin": schema.SingleNestedAttribute{ + "source_postgres_update_update_method_detect_changes_with_xmin_system_column": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -165,9 +180,9 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema Description: `must be one of ["Xmin"]`, }, }, - Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB.`, }, - "source_postgres_update_replication_method_logical_replication_cdc": schema.SingleNestedAttribute{ + "source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "initial_waiting_seconds": schema.Int64Attribute{ @@ -224,9 +239,9 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema Description: `Parsed as JSON.`, }, }, - Description: `Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs.`, + Description: `Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. 
Recommended for tables of any size.`, }, - "source_postgres_update_replication_method_standard": schema.SingleNestedAttribute{ + "source_postgres_update_update_method_scan_changes_with_user_defined_cursor": schema.SingleNestedAttribute{ Optional: true, Attributes: map[string]schema.Attribute{ "method": schema.StringAttribute{ @@ -239,28 +254,13 @@ func (r *SourcePostgresResource) Schema(ctx context.Context, req resource.Schema Description: `must be one of ["Standard"]`, }, }, - Description: `Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, - }, - "source_postgres_update_replication_method_standard_xmin": schema.SingleNestedAttribute{ - Optional: true, - Attributes: map[string]schema.Attribute{ - "method": schema.StringAttribute{ - Required: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "Xmin", - ), - }, - Description: `must be one of ["Xmin"]`, - }, - }, - Description: `Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally.`, + Description: `Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at).`, }, }, Validators: []validator.Object{ validators.ExactlyOneChild(), }, - Description: `Replication method for extracting data from the database.`, + Description: `Configures how data is extracted from the database.`, }, "schemas": schema.ListAttribute{ Optional: true, diff --git a/internal/provider/source_postgres_resource_sdk.go b/internal/provider/source_postgres_resource_sdk.go index 1e617a7f4..1623a5ac9 100755 --- a/internal/provider/source_postgres_resource_sdk.go +++ b/internal/provider/source_postgres_resource_sdk.go @@ -24,57 +24,45 @@ func (r *SourcePostgresResourceModel) ToCreateSDKType() *shared.SourcePostgresCr password = nil } port := r.Configuration.Port.ValueInt64() - var replicationMethod *shared.SourcePostgresReplicationMethod + var replicationMethod *shared.SourcePostgresUpdateMethod if r.Configuration.ReplicationMethod != nil { - var sourcePostgresReplicationMethodStandardXmin *shared.SourcePostgresReplicationMethodStandardXmin - if r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodStandardXmin != nil { - method := shared.SourcePostgresReplicationMethodStandardXminMethod(r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodStandardXmin.Method.ValueString()) - sourcePostgresReplicationMethodStandardXmin = &shared.SourcePostgresReplicationMethodStandardXmin{ - Method: method, - } - } - if sourcePostgresReplicationMethodStandardXmin != nil { - replicationMethod = &shared.SourcePostgresReplicationMethod{ - SourcePostgresReplicationMethodStandardXmin: sourcePostgresReplicationMethodStandardXmin, - } - } - var sourcePostgresReplicationMethodLogicalReplicationCDC *shared.SourcePostgresReplicationMethodLogicalReplicationCDC - if r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC != nil { + var sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC + if r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC != nil { initialWaitingSeconds := new(int64) - if !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsNull() { 
- *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.ValueInt64() + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsNull() { + *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.ValueInt64() } else { initialWaitingSeconds = nil } - lsnCommitBehaviour := new(shared.SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) - if !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsNull() { - *lsnCommitBehaviour = shared.SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.ValueString()) + lsnCommitBehaviour := new(shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsNull() { + *lsnCommitBehaviour = shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.ValueString()) } else { lsnCommitBehaviour = nil } - method1 := shared.SourcePostgresReplicationMethodLogicalReplicationCDCMethod(r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.Method.ValueString()) - plugin := new(shared.SourcePostgresReplicationMethodLogicalReplicationCDCPlugin) - if !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.Plugin.IsNull() { - *plugin = shared.SourcePostgresReplicationMethodLogicalReplicationCDCPlugin(r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.Plugin.ValueString()) + method := shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Method.ValueString()) + plugin := new(shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsNull() { + *plugin = shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.ValueString()) } else { plugin = nil } - publication := r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.Publication.ValueString() + publication := r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.Publication.ValueString() queueSize := new(int64) - if 
!r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.QueueSize.IsNull() { - *queueSize = r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.QueueSize.ValueInt64() + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsNull() { + *queueSize = r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.ValueInt64() } else { queueSize = nil } - replicationSlot := r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.ReplicationSlot.ValueString() + replicationSlot := r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.ReplicationSlot.ValueString() var additionalProperties interface{} - if !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsNull() { - _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodLogicalReplicationCDC.AdditionalProperties.ValueString()), &additionalProperties) + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsNull() { + _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.ValueString()), &additionalProperties) } - sourcePostgresReplicationMethodLogicalReplicationCDC = &shared.SourcePostgresReplicationMethodLogicalReplicationCDC{ + sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC = &shared.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC{ InitialWaitingSeconds: initialWaitingSeconds, LsnCommitBehaviour: lsnCommitBehaviour, - Method: method1, + Method: method, Plugin: plugin, Publication: publication, QueueSize: queueSize, @@ -82,21 +70,33 @@ func (r *SourcePostgresResourceModel) ToCreateSDKType() *shared.SourcePostgresCr AdditionalProperties: additionalProperties, } } - if sourcePostgresReplicationMethodLogicalReplicationCDC != nil { - replicationMethod = &shared.SourcePostgresReplicationMethod{ - SourcePostgresReplicationMethodLogicalReplicationCDC: sourcePostgresReplicationMethodLogicalReplicationCDC, + if sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC != nil { + replicationMethod = &shared.SourcePostgresUpdateMethod{ + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC: sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC, + } + } + var sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *shared.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn + if r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn != nil { + method1 := shared.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn.Method.ValueString()) + sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn = 
&shared.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn{ + Method: method1, + } + } + if sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn != nil { + replicationMethod = &shared.SourcePostgresUpdateMethod{ + SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn: sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn, } } - var sourcePostgresReplicationMethodStandard *shared.SourcePostgresReplicationMethodStandard - if r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodStandard != nil { - method2 := shared.SourcePostgresReplicationMethodStandardMethod(r.Configuration.ReplicationMethod.SourcePostgresReplicationMethodStandard.Method.ValueString()) - sourcePostgresReplicationMethodStandard = &shared.SourcePostgresReplicationMethodStandard{ + var sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *shared.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor + if r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor != nil { + method2 := shared.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString()) + sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor{ Method: method2, } } - if sourcePostgresReplicationMethodStandard != nil { - replicationMethod = &shared.SourcePostgresReplicationMethod{ - SourcePostgresReplicationMethodStandard: sourcePostgresReplicationMethodStandard, + if sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor != nil { + replicationMethod = &shared.SourcePostgresUpdateMethod{ + SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor: sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor, } } } @@ -363,57 +363,45 @@ func (r *SourcePostgresResourceModel) ToUpdateSDKType() *shared.SourcePostgresPu password = nil } port := r.Configuration.Port.ValueInt64() - var replicationMethod *shared.SourcePostgresUpdateReplicationMethod + var replicationMethod *shared.SourcePostgresUpdateUpdateMethod if r.Configuration.ReplicationMethod != nil { - var sourcePostgresUpdateReplicationMethodStandardXmin *shared.SourcePostgresUpdateReplicationMethodStandardXmin - if r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodStandardXmin != nil { - method := shared.SourcePostgresUpdateReplicationMethodStandardXminMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodStandardXmin.Method.ValueString()) - sourcePostgresUpdateReplicationMethodStandardXmin = &shared.SourcePostgresUpdateReplicationMethodStandardXmin{ - Method: method, - } - } - if sourcePostgresUpdateReplicationMethodStandardXmin != nil { - replicationMethod = &shared.SourcePostgresUpdateReplicationMethod{ - SourcePostgresUpdateReplicationMethodStandardXmin: sourcePostgresUpdateReplicationMethodStandardXmin, - } - } - var sourcePostgresUpdateReplicationMethodLogicalReplicationCDC *shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC - if r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC != nil { + var sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC + if r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC != nil { initialWaitingSeconds := new(int64) - if 
!r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.IsNull() { - *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.InitialWaitingSeconds.ValueInt64() + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.IsNull() { + *initialWaitingSeconds = r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.InitialWaitingSeconds.ValueInt64() } else { initialWaitingSeconds = nil } - lsnCommitBehaviour := new(shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) - if !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.IsNull() { - *lsnCommitBehaviour = shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.LsnCommitBehaviour.ValueString()) + lsnCommitBehaviour := new(shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.IsNull() { + *lsnCommitBehaviour = shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.LsnCommitBehaviour.ValueString()) } else { lsnCommitBehaviour = nil } - method1 := shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.Method.ValueString()) - plugin := new(shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin) - if !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.Plugin.IsNull() { - *plugin = shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin(r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.Plugin.ValueString()) + method := shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Method.ValueString()) + plugin := new(shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.IsNull() { + *plugin = 
shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Plugin.ValueString()) } else { plugin = nil } - publication := r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.Publication.ValueString() + publication := r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.Publication.ValueString() queueSize := new(int64) - if !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.QueueSize.IsNull() { - *queueSize = r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.QueueSize.ValueInt64() + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.IsNull() { + *queueSize = r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.QueueSize.ValueInt64() } else { queueSize = nil } - replicationSlot := r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.ReplicationSlot.ValueString() + replicationSlot := r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.ReplicationSlot.ValueString() var additionalProperties interface{} - if !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.AdditionalProperties.IsNull() { - _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC.AdditionalProperties.ValueString()), &additionalProperties) + if !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsUnknown() && !r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.IsNull() { + _ = json.Unmarshal([]byte(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC.AdditionalProperties.ValueString()), &additionalProperties) } - sourcePostgresUpdateReplicationMethodLogicalReplicationCDC = &shared.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC{ + sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC = &shared.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC{ InitialWaitingSeconds: initialWaitingSeconds, LsnCommitBehaviour: lsnCommitBehaviour, - Method: method1, + Method: method, Plugin: plugin, Publication: publication, QueueSize: queueSize, @@ -421,21 +409,33 @@ func (r *SourcePostgresResourceModel) ToUpdateSDKType() *shared.SourcePostgresPu AdditionalProperties: additionalProperties, } } - if sourcePostgresUpdateReplicationMethodLogicalReplicationCDC != nil { - replicationMethod = &shared.SourcePostgresUpdateReplicationMethod{ - SourcePostgresUpdateReplicationMethodLogicalReplicationCDC: sourcePostgresUpdateReplicationMethodLogicalReplicationCDC, + if sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC != nil { + replicationMethod = 
&shared.SourcePostgresUpdateUpdateMethod{ + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC: sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC, + } + } + var sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *shared.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn + if r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn != nil { + method1 := shared.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn.Method.ValueString()) + sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn = &shared.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn{ + Method: method1, + } + } + if sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn != nil { + replicationMethod = &shared.SourcePostgresUpdateUpdateMethod{ + SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn: sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn, } } - var sourcePostgresUpdateReplicationMethodStandard *shared.SourcePostgresUpdateReplicationMethodStandard - if r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodStandard != nil { - method2 := shared.SourcePostgresUpdateReplicationMethodStandardMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateReplicationMethodStandard.Method.ValueString()) - sourcePostgresUpdateReplicationMethodStandard = &shared.SourcePostgresUpdateReplicationMethodStandard{ + var sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *shared.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor + if r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { + method2 := shared.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(r.Configuration.ReplicationMethod.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor.Method.ValueString()) + sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor = &shared.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor{ Method: method2, } } - if sourcePostgresUpdateReplicationMethodStandard != nil { - replicationMethod = &shared.SourcePostgresUpdateReplicationMethod{ - SourcePostgresUpdateReplicationMethodStandard: sourcePostgresUpdateReplicationMethodStandard, + if sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { + replicationMethod = &shared.SourcePostgresUpdateUpdateMethod{ + SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor: sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor, } } } diff --git a/internal/provider/source_posthog_data_source.go b/internal/provider/source_posthog_data_source.go index fd508c4bb..8a2130162 100755 --- a/internal/provider/source_posthog_data_source.go +++ b/internal/provider/source_posthog_data_source.go @@ -61,6 +61,10 @@ func (r *SourcePosthogDataSource) Schema(ctx context.Context, req datasource.Sch Computed: true, Description: `Base PostHog url. 
Defaults to PostHog Cloud (https://app.posthog.com).`, }, + "events_time_step": schema.Int64Attribute{ + Computed: true, + Description: `Set lower value in case of failing long running sync of events stream.`, + }, "source_type": schema.StringAttribute{ Computed: true, Validators: []validator.String{ diff --git a/internal/provider/source_posthog_resource.go b/internal/provider/source_posthog_resource.go index 942f56b3c..7e7f785d3 100755 --- a/internal/provider/source_posthog_resource.go +++ b/internal/provider/source_posthog_resource.go @@ -63,6 +63,10 @@ func (r *SourcePosthogResource) Schema(ctx context.Context, req resource.SchemaR Optional: true, Description: `Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com).`, }, + "events_time_step": schema.Int64Attribute{ + Optional: true, + Description: `Set lower value in case of failing long running sync of events stream.`, + }, "source_type": schema.StringAttribute{ Required: true, Validators: []validator.String{ diff --git a/internal/provider/source_posthog_resource_sdk.go b/internal/provider/source_posthog_resource_sdk.go index d5c81094e..9570e4d14 100755 --- a/internal/provider/source_posthog_resource_sdk.go +++ b/internal/provider/source_posthog_resource_sdk.go @@ -16,13 +16,20 @@ func (r *SourcePosthogResourceModel) ToCreateSDKType() *shared.SourcePosthogCrea } else { baseURL = nil } + eventsTimeStep := new(int64) + if !r.Configuration.EventsTimeStep.IsUnknown() && !r.Configuration.EventsTimeStep.IsNull() { + *eventsTimeStep = r.Configuration.EventsTimeStep.ValueInt64() + } else { + eventsTimeStep = nil + } sourceType := shared.SourcePosthogPosthog(r.Configuration.SourceType.ValueString()) startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) configuration := shared.SourcePosthog{ - APIKey: apiKey, - BaseURL: baseURL, - SourceType: sourceType, - StartDate: startDate, + APIKey: apiKey, + BaseURL: baseURL, + EventsTimeStep: eventsTimeStep, + SourceType: sourceType, + StartDate: startDate, } name := r.Name.ValueString() secretID := new(string) @@ -54,11 +61,18 @@ func (r *SourcePosthogResourceModel) ToUpdateSDKType() *shared.SourcePosthogPutR } else { baseURL = nil } + eventsTimeStep := new(int64) + if !r.Configuration.EventsTimeStep.IsUnknown() && !r.Configuration.EventsTimeStep.IsNull() { + *eventsTimeStep = r.Configuration.EventsTimeStep.ValueInt64() + } else { + eventsTimeStep = nil + } startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) configuration := shared.SourcePosthogUpdate{ - APIKey: apiKey, - BaseURL: baseURL, - StartDate: startDate, + APIKey: apiKey, + BaseURL: baseURL, + EventsTimeStep: eventsTimeStep, + StartDate: startDate, } name := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() diff --git a/internal/provider/source_publicapis_data_source.go b/internal/provider/source_publicapis_data_source.go deleted file mode 100755 index cb83b30fb..000000000 --- a/internal/provider/source_publicapis_data_source.go +++ /dev/null @@ -1,149 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import ( - "airbyte/internal/sdk" - "airbyte/internal/sdk/pkg/models/operations" - "context" - "fmt" - - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/datasource" - "github.com/hashicorp/terraform-plugin-framework/datasource/schema" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -// Ensure provider defined types fully satisfy framework interfaces. -var _ datasource.DataSource = &SourcePublicApisDataSource{} -var _ datasource.DataSourceWithConfigure = &SourcePublicApisDataSource{} - -func NewSourcePublicApisDataSource() datasource.DataSource { - return &SourcePublicApisDataSource{} -} - -// SourcePublicApisDataSource is the data source implementation. -type SourcePublicApisDataSource struct { - client *sdk.SDK -} - -// SourcePublicApisDataSourceModel describes the data model. -type SourcePublicApisDataSourceModel struct { - Configuration SourcePublicApis `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - WorkspaceID types.String `tfsdk:"workspace_id"` -} - -// Metadata returns the data source type name. -func (r *SourcePublicApisDataSource) Metadata(ctx context.Context, req datasource.MetadataRequest, resp *datasource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_source_public_apis" -} - -// Schema defines the schema for the data source. -func (r *SourcePublicApisDataSource) Schema(ctx context.Context, req datasource.SchemaRequest, resp *datasource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "SourcePublicApis DataSource", - - Attributes: map[string]schema.Attribute{ - "configuration": schema.SingleNestedAttribute{ - Computed: true, - Attributes: map[string]schema.Attribute{ - "source_type": schema.StringAttribute{ - Computed: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "public-apis", - ), - }, - Description: `must be one of ["public-apis"]`, - }, - }, - }, - "name": schema.StringAttribute{ - Computed: true, - }, - "secret_id": schema.StringAttribute{ - Optional: true, - Description: `Optional secretID obtained through the public API OAuth redirect flow.`, - }, - "source_id": schema.StringAttribute{ - Required: true, - }, - "workspace_id": schema.StringAttribute{ - Computed: true, - }, - }, - } -} - -func (r *SourcePublicApisDataSource) Configure(ctx context.Context, req datasource.ConfigureRequest, resp *datasource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - client, ok := req.ProviderData.(*sdk.SDK) - - if !ok { - resp.Diagnostics.AddError( - "Unexpected DataSource Configure Type", - fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - - return - } - - r.client = client -} - -func (r *SourcePublicApisDataSource) Read(ctx context.Context, req datasource.ReadRequest, resp *datasource.ReadResponse) { - var data *SourcePublicApisDataSourceModel - var item types.Object - - resp.Diagnostics.Append(req.Config.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) 
- - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.GetSourcePublicApisRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.GetSourcePublicApis(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromGetResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} diff --git a/internal/provider/source_publicapis_data_source_sdk.go b/internal/provider/source_publicapis_data_source_sdk.go deleted file mode 100755 index 3a69a355e..000000000 --- a/internal/provider/source_publicapis_data_source_sdk.go +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "github.com/hashicorp/terraform-plugin-framework/types" -) - -func (r *SourcePublicApisDataSourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) { - r.Name = types.StringValue(resp.Name) - r.SourceID = types.StringValue(resp.SourceID) - r.WorkspaceID = types.StringValue(resp.WorkspaceID) -} diff --git a/internal/provider/source_publicapis_resource.go b/internal/provider/source_publicapis_resource.go deleted file mode 100755 index 59429f989..000000000 --- a/internal/provider/source_publicapis_resource.go +++ /dev/null @@ -1,315 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import ( - "airbyte/internal/sdk" - "context" - "fmt" - - speakeasy_stringplanmodifier "airbyte/internal/planmodifiers/stringplanmodifier" - "airbyte/internal/sdk/pkg/models/operations" - "github.com/hashicorp/terraform-plugin-framework-validators/stringvalidator" - "github.com/hashicorp/terraform-plugin-framework/path" - "github.com/hashicorp/terraform-plugin-framework/resource" - "github.com/hashicorp/terraform-plugin-framework/resource/schema" - "github.com/hashicorp/terraform-plugin-framework/resource/schema/planmodifier" - "github.com/hashicorp/terraform-plugin-framework/schema/validator" - "github.com/hashicorp/terraform-plugin-framework/types" - "github.com/hashicorp/terraform-plugin-framework/types/basetypes" -) - -// Ensure provider defined types fully satisfy framework interfaces. -var _ resource.Resource = &SourcePublicApisResource{} -var _ resource.ResourceWithImportState = &SourcePublicApisResource{} - -func NewSourcePublicApisResource() resource.Resource { - return &SourcePublicApisResource{} -} - -// SourcePublicApisResource defines the resource implementation. -type SourcePublicApisResource struct { - client *sdk.SDK -} - -// SourcePublicApisResourceModel describes the resource data model. 
-type SourcePublicApisResourceModel struct { - Configuration SourcePublicApis `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - SourceType types.String `tfsdk:"source_type"` - WorkspaceID types.String `tfsdk:"workspace_id"` -} - -func (r *SourcePublicApisResource) Metadata(ctx context.Context, req resource.MetadataRequest, resp *resource.MetadataResponse) { - resp.TypeName = req.ProviderTypeName + "_source_public_apis" -} - -func (r *SourcePublicApisResource) Schema(ctx context.Context, req resource.SchemaRequest, resp *resource.SchemaResponse) { - resp.Schema = schema.Schema{ - MarkdownDescription: "SourcePublicApis Resource", - - Attributes: map[string]schema.Attribute{ - "configuration": schema.SingleNestedAttribute{ - Required: true, - Attributes: map[string]schema.Attribute{ - "source_type": schema.StringAttribute{ - Required: true, - Validators: []validator.String{ - stringvalidator.OneOf( - "public-apis", - ), - }, - Description: `must be one of ["public-apis"]`, - }, - }, - }, - "name": schema.StringAttribute{ - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - Required: true, - }, - "secret_id": schema.StringAttribute{ - Optional: true, - Description: `Optional secretID obtained through the public API OAuth redirect flow.`, - }, - "source_id": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - }, - "source_type": schema.StringAttribute{ - Computed: true, - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - }, - "workspace_id": schema.StringAttribute{ - PlanModifiers: []planmodifier.String{ - speakeasy_stringplanmodifier.SuppressDiff(), - }, - Required: true, - }, - }, - } -} - -func (r *SourcePublicApisResource) Configure(ctx context.Context, req resource.ConfigureRequest, resp *resource.ConfigureResponse) { - // Prevent panic if the provider has not been configured. - if req.ProviderData == nil { - return - } - - client, ok := req.ProviderData.(*sdk.SDK) - - if !ok { - resp.Diagnostics.AddError( - "Unexpected Resource Configure Type", - fmt.Sprintf("Expected *sdk.SDK, got: %T. Please report this issue to the provider developers.", req.ProviderData), - ) - - return - } - - r.client = client -} - -func (r *SourcePublicApisResource) Create(ctx context.Context, req resource.CreateRequest, resp *resource.CreateResponse) { - var data *SourcePublicApisResourceModel - var item types.Object - - resp.Diagnostics.Append(req.Plan.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - request := *data.ToCreateSDKType() - res, err := r.client.Sources.CreateSourcePublicApis(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. 
Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromCreateResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourcePublicApisResource) Read(ctx context.Context, req resource.ReadRequest, resp *resource.ReadResponse) { - var data *SourcePublicApisResourceModel - var item types.Object - - resp.Diagnostics.Append(req.State.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.GetSourcePublicApisRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.GetSourcePublicApis(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if res.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - if res.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(res.RawResponse)) - return - } - data.RefreshFromGetResponse(res.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourcePublicApisResource) Update(ctx context.Context, req resource.UpdateRequest, resp *resource.UpdateResponse) { - var data *SourcePublicApisResourceModel - merge(ctx, req, resp, &data) - if resp.Diagnostics.HasError() { - return - } - - sourcePublicApisPutRequest := data.ToUpdateSDKType() - sourceID := data.SourceID.ValueString() - request := operations.PutSourcePublicApisRequest{ - SourcePublicApisPutRequest: sourcePublicApisPutRequest, - SourceID: sourceID, - } - res, err := r.client.Sources.PutSourcePublicApis(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. 
Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - sourceId1 := data.SourceID.ValueString() - getRequest := operations.GetSourcePublicApisRequest{ - SourceID: sourceId1, - } - getResponse, err := r.client.Sources.GetSourcePublicApis(ctx, getRequest) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if getResponse == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", getResponse)) - return - } - if getResponse.StatusCode != 200 { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", getResponse.StatusCode), debugResponse(getResponse.RawResponse)) - return - } - if getResponse.SourceResponse == nil { - resp.Diagnostics.AddError("unexpected response from API. No response body", debugResponse(getResponse.RawResponse)) - return - } - data.RefreshFromGetResponse(getResponse.SourceResponse) - - // Save updated data into Terraform state - resp.Diagnostics.Append(resp.State.Set(ctx, &data)...) -} - -func (r *SourcePublicApisResource) Delete(ctx context.Context, req resource.DeleteRequest, resp *resource.DeleteResponse) { - var data *SourcePublicApisResourceModel - var item types.Object - - resp.Diagnostics.Append(req.State.Get(ctx, &item)...) - if resp.Diagnostics.HasError() { - return - } - - resp.Diagnostics.Append(item.As(ctx, &data, basetypes.ObjectAsOptions{ - UnhandledNullAsEmpty: true, - UnhandledUnknownAsEmpty: true, - })...) - - if resp.Diagnostics.HasError() { - return - } - - sourceID := data.SourceID.ValueString() - request := operations.DeleteSourcePublicApisRequest{ - SourceID: sourceID, - } - res, err := r.client.Sources.DeleteSourcePublicApis(ctx, request) - if err != nil { - resp.Diagnostics.AddError("failure to invoke API", err.Error()) - if res != nil && res.RawResponse != nil { - resp.Diagnostics.AddError("unexpected http request/response", debugResponse(res.RawResponse)) - } - return - } - if res == nil { - resp.Diagnostics.AddError("unexpected response from API", fmt.Sprintf("%v", res)) - return - } - if fmt.Sprintf("%v", res.StatusCode)[0] != '2' { - resp.Diagnostics.AddError(fmt.Sprintf("unexpected response from API. Got an unexpected response code %v", res.StatusCode), debugResponse(res.RawResponse)) - return - } - -} - -func (r *SourcePublicApisResource) ImportState(ctx context.Context, req resource.ImportStateRequest, resp *resource.ImportStateResponse) { - resource.ImportStatePassthroughID(ctx, path.Root("source_id"), req, resp) -} diff --git a/internal/provider/source_publicapis_resource_sdk.go b/internal/provider/source_publicapis_resource_sdk.go deleted file mode 100755 index 7f6a5033a..000000000 --- a/internal/provider/source_publicapis_resource_sdk.go +++ /dev/null @@ -1,63 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "github.com/hashicorp/terraform-plugin-framework/types" -) - -func (r *SourcePublicApisResourceModel) ToCreateSDKType() *shared.SourcePublicApisCreateRequest { - sourceType := shared.SourcePublicApisPublicApis(r.Configuration.SourceType.ValueString()) - configuration := shared.SourcePublicApis{ - SourceType: sourceType, - } - name := r.Name.ValueString() - secretID := new(string) - if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() { - *secretID = r.SecretID.ValueString() - } else { - secretID = nil - } - workspaceID := r.WorkspaceID.ValueString() - out := shared.SourcePublicApisCreateRequest{ - Configuration: configuration, - Name: name, - SecretID: secretID, - WorkspaceID: workspaceID, - } - return &out -} - -func (r *SourcePublicApisResourceModel) ToGetSDKType() *shared.SourcePublicApisCreateRequest { - out := r.ToCreateSDKType() - return out -} - -func (r *SourcePublicApisResourceModel) ToUpdateSDKType() *shared.SourcePublicApisPutRequest { - configuration := shared.SourcePublicApisUpdate{} - name := r.Name.ValueString() - workspaceID := r.WorkspaceID.ValueString() - out := shared.SourcePublicApisPutRequest{ - Configuration: configuration, - Name: name, - WorkspaceID: workspaceID, - } - return &out -} - -func (r *SourcePublicApisResourceModel) ToDeleteSDKType() *shared.SourcePublicApisCreateRequest { - out := r.ToCreateSDKType() - return out -} - -func (r *SourcePublicApisResourceModel) RefreshFromGetResponse(resp *shared.SourceResponse) { - r.Name = types.StringValue(resp.Name) - r.SourceID = types.StringValue(resp.SourceID) - r.SourceType = types.StringValue(resp.SourceType) - r.WorkspaceID = types.StringValue(resp.WorkspaceID) -} - -func (r *SourcePublicApisResourceModel) RefreshFromCreateResponse(resp *shared.SourceResponse) { - r.RefreshFromGetResponse(resp) -} diff --git a/internal/provider/source_s3_data_source.go b/internal/provider/source_s3_data_source.go index 0dafceb56..9cf3e173e 100755 --- a/internal/provider/source_s3_data_source.go +++ b/internal/provider/source_s3_data_source.go @@ -53,9 +53,25 @@ func (r *SourceS3DataSource) Schema(ctx context.Context, req datasource.SchemaRe "configuration": schema.SingleNestedAttribute{ Computed: true, Attributes: map[string]schema.Attribute{ + "aws_access_key_id": schema.StringAttribute{ + Computed: true, + Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`, + }, + "aws_secret_access_key": schema.StringAttribute{ + Computed: true, + Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`, + }, + "bucket": schema.StringAttribute{ + Computed: true, + Description: `Name of the S3 bucket where the file(s) exist.`, + }, "dataset": schema.StringAttribute{ Computed: true, - Description: `The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.`, + }, + "endpoint": schema.StringAttribute{ + Computed: true, + Description: `Endpoint to an S3 compatible service. 
Leave empty to use AWS.`, }, "format": schema.SingleNestedAttribute{ Computed: true, @@ -330,11 +346,11 @@ func (r *SourceS3DataSource) Schema(ctx context.Context, req datasource.SchemaRe Validators: []validator.Object{ validators.ExactlyOneChild(), }, - Description: `The format of the files you'd like to replicate`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate`, }, "path_pattern": schema.StringAttribute{ Computed: true, - Description: `A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.`, }, "provider": schema.SingleNestedAttribute{ Computed: true, @@ -367,11 +383,11 @@ func (r *SourceS3DataSource) Schema(ctx context.Context, req datasource.SchemaRe Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.`, }, }, - Description: `Use this to load files from S3 or S3-compatible services`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services`, }, "schema": schema.StringAttribute{ Computed: true, - Description: `Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`, }, "source_type": schema.StringAttribute{ Computed: true, @@ -382,7 +398,448 @@ func (r *SourceS3DataSource) Schema(ctx context.Context, req datasource.SchemaRe }, Description: `must be one of ["s3"]`, }, + "start_date": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + validators.IsRFC3339(), + }, + Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. 
Any file modified before this date will not be replicated.`, + }, + "streams": schema.ListNestedAttribute{ + Computed: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "days_to_sync_if_history_is_full": schema.Int64Attribute{ + Computed: true, + Description: `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`, + }, + "file_type": schema.StringAttribute{ + Computed: true, + Description: `The data file type that is being extracted for a stream.`, + }, + "format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "source_s3_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "double_as_string": schema.BoolAttribute{ + Computed: true, + Description: `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`, + }, + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "avro", + ), + }, + Description: `must be one of ["avro"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "delimiter": schema.StringAttribute{ + Computed: true, + Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`, + }, + "double_quote": schema.BoolAttribute{ + Computed: true, + Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`, + }, + "encoding": schema.StringAttribute{ + Computed: true, + Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`, + }, + "escape_char": schema.StringAttribute{ + Computed: true, + Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`, + }, + "false_values": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as false values.`, + }, + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "csv", + ), + }, + Description: `must be one of ["csv"]`, + }, + "header_definition": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "From CSV", + ), + }, + Description: `must be one of ["From CSV"]`, + }, + }, + Description: `How headers will be defined. 
` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Autogenerated", + ), + }, + Description: `must be one of ["Autogenerated"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "column_names": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `The column names that will be used while emitting the CSV records`, + }, + "header_definition_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "User Provided", + ), + }, + Description: `must be one of ["User Provided"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "inference_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "None", + "Primitive Types Only", + ), + }, + MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" + + `How to infer the types of the columns. 
If none, inference default to strings.`, + }, + "null_values": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`, + }, + "quote_char": schema.StringAttribute{ + Computed: true, + Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`, + }, + "skip_rows_after_header": schema.Int64Attribute{ + Computed: true, + Description: `The number of rows to skip after the header row.`, + }, + "skip_rows_before_header": schema.Int64Attribute{ + Computed: true, + Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`, + }, + "strings_can_be_null": schema.BoolAttribute{ + Computed: true, + Description: `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`, + }, + "true_values": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as true values.`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "jsonl", + ), + }, + Description: `must be one of ["jsonl"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "decimal_as_float": schema.BoolAttribute{ + Computed: true, + Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`, + }, + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "parquet", + ), + }, + Description: `must be one of ["parquet"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "double_as_string": schema.BoolAttribute{ + Computed: true, + Description: `Whether to convert double fields to strings. 
This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`, + }, + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "avro", + ), + }, + Description: `must be one of ["avro"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "delimiter": schema.StringAttribute{ + Computed: true, + Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`, + }, + "double_quote": schema.BoolAttribute{ + Computed: true, + Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`, + }, + "encoding": schema.StringAttribute{ + Computed: true, + Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`, + }, + "escape_char": schema.StringAttribute{ + Computed: true, + Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`, + }, + "false_values": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as false values.`, + }, + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "csv", + ), + }, + Description: `must be one of ["csv"]`, + }, + "header_definition": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "From CSV", + ), + }, + Description: `must be one of ["From CSV"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Autogenerated", + ), + }, + Description: `must be one of ["Autogenerated"]`, + }, + }, + Description: `How headers will be defined. 
` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "column_names": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `The column names that will be used while emitting the CSV records`, + }, + "header_definition_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "User Provided", + ), + }, + Description: `must be one of ["User Provided"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "inference_type": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "None", + "Primitive Types Only", + ), + }, + MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" + + `How to infer the types of the columns. If none, inference default to strings.`, + }, + "null_values": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`, + }, + "quote_char": schema.StringAttribute{ + Computed: true, + Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`, + }, + "skip_rows_after_header": schema.Int64Attribute{ + Computed: true, + Description: `The number of rows to skip after the header row.`, + }, + "skip_rows_before_header": schema.Int64Attribute{ + Computed: true, + Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`, + }, + "strings_can_be_null": schema.BoolAttribute{ + Computed: true, + Description: `Whether strings can be interpreted as null values. 
If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`, + }, + "true_values": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as true values.`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "jsonl", + ), + }, + Description: `must be one of ["jsonl"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{ + Computed: true, + Attributes: map[string]schema.Attribute{ + "decimal_as_float": schema.BoolAttribute{ + Computed: true, + Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`, + }, + "filetype": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "parquet", + ), + }, + Description: `must be one of ["parquet"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "globs": schema.ListAttribute{ + Computed: true, + ElementType: types.StringType, + Description: `The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.`, + }, + "input_schema": schema.StringAttribute{ + Computed: true, + Description: `The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.`, + }, + "legacy_prefix": schema.StringAttribute{ + Computed: true, + Description: `The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.`, + }, + "name": schema.StringAttribute{ + Computed: true, + Description: `The name of the stream.`, + }, + "primary_key": schema.StringAttribute{ + Computed: true, + Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`, + }, + "schemaless": schema.BoolAttribute{ + Computed: true, + Description: `When enabled, syncs will not validate or structure records against the stream's schema.`, + }, + "validation_policy": schema.StringAttribute{ + Computed: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Emit Record", + "Skip Record", + "Wait for Discover", + ), + }, + MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]` + "\n" + + `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`, + }, + }, + }, + Description: `Each instance of this configuration defines a stream. 
Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.`, + }, }, + MarkdownDescription: `NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes` + "\n" + + `because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK.`, }, "name": schema.StringAttribute{ Computed: true, diff --git a/internal/provider/source_s3_resource.go b/internal/provider/source_s3_resource.go index da648361f..3d61c3ce6 100755 --- a/internal/provider/source_s3_resource.go +++ b/internal/provider/source_s3_resource.go @@ -55,9 +55,25 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques "configuration": schema.SingleNestedAttribute{ Required: true, Attributes: map[string]schema.Attribute{ - "dataset": schema.StringAttribute{ + "aws_access_key_id": schema.StringAttribute{ + Optional: true, + Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`, + }, + "aws_secret_access_key": schema.StringAttribute{ + Optional: true, + Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`, + }, + "bucket": schema.StringAttribute{ Required: true, - Description: `The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.`, + Description: `Name of the S3 bucket where the file(s) exist.`, + }, + "dataset": schema.StringAttribute{ + Optional: true, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores.`, + }, + "endpoint": schema.StringAttribute{ + Optional: true, + Description: `Endpoint to an S3 compatible service. Leave empty to use AWS.`, }, "format": schema.SingleNestedAttribute{ Optional: true, @@ -332,14 +348,14 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques Validators: []validator.Object{ validators.ExactlyOneChild(), }, - Description: `The format of the files you'd like to replicate`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate`, }, "path_pattern": schema.StringAttribute{ - Required: true, - Description: `A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files.`, + Optional: true, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). 
Use pattern ** to pick up all files.`, }, "provider": schema.SingleNestedAttribute{ - Required: true, + Optional: true, Attributes: map[string]schema.Attribute{ "aws_access_key_id": schema.StringAttribute{ Optional: true, @@ -350,7 +366,7 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques Description: `In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary.`, }, "bucket": schema.StringAttribute{ - Required: true, + Optional: true, Description: `Name of the S3 bucket where the file(s) exist.`, }, "endpoint": schema.StringAttribute{ @@ -369,11 +385,11 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques Description: `UTC date and time in the format 2017-01-25T00:00:00Z. Any file modified before this date will not be replicated.`, }, }, - Description: `Use this to load files from S3 or S3-compatible services`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services`, }, "schema": schema.StringAttribute{ Optional: true, - Description: `Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`, + Description: `Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema.`, }, "source_type": schema.StringAttribute{ Required: true, @@ -384,7 +400,448 @@ func (r *SourceS3Resource) Schema(ctx context.Context, req resource.SchemaReques }, Description: `must be one of ["s3"]`, }, + "start_date": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + validators.IsRFC3339(), + }, + Description: `UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated.`, + }, + "streams": schema.ListNestedAttribute{ + Required: true, + NestedObject: schema.NestedAttributeObject{ + Attributes: map[string]schema.Attribute{ + "days_to_sync_if_history_is_full": schema.Int64Attribute{ + Optional: true, + Description: `When the state history of the file store is full, syncs will only read files that were last modified in the provided day range.`, + }, + "file_type": schema.StringAttribute{ + Required: true, + Description: `The data file type that is being extracted for a stream.`, + }, + "format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "source_s3_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "double_as_string": schema.BoolAttribute{ + Optional: true, + Description: `Whether to convert double fields to strings. 
This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`, + }, + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "avro", + ), + }, + Description: `must be one of ["avro"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "delimiter": schema.StringAttribute{ + Optional: true, + Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`, + }, + "double_quote": schema.BoolAttribute{ + Optional: true, + Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`, + }, + "encoding": schema.StringAttribute{ + Optional: true, + Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`, + }, + "escape_char": schema.StringAttribute{ + Optional: true, + Description: `The character used for escaping special characters. To disallow escaping, leave this field blank.`, + }, + "false_values": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as false values.`, + }, + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "csv", + ), + }, + Description: `must be one of ["csv"]`, + }, + "header_definition": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Autogenerated", + ), + }, + Description: `must be one of ["Autogenerated"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "From CSV", + ), + }, + Description: `must be one of ["From CSV"]`, + }, + }, + Description: `How headers will be defined. 
` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "column_names": schema.ListAttribute{ + Required: true, + ElementType: types.StringType, + Description: `The column names that will be used while emitting the CSV records`, + }, + "header_definition_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "User Provided", + ), + }, + Description: `must be one of ["User Provided"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "inference_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "None", + "Primitive Types Only", + ), + }, + MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" + + `How to infer the types of the columns. If none, inference default to strings.`, + }, + "null_values": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`, + }, + "quote_char": schema.StringAttribute{ + Optional: true, + Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`, + }, + "skip_rows_after_header": schema.Int64Attribute{ + Optional: true, + Description: `The number of rows to skip after the header row.`, + }, + "skip_rows_before_header": schema.Int64Attribute{ + Optional: true, + Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`, + }, + "strings_can_be_null": schema.BoolAttribute{ + Optional: true, + Description: `Whether strings can be interpreted as null values. 
If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`, + }, + "true_values": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as true values.`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "jsonl", + ), + }, + Description: `must be one of ["jsonl"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "decimal_as_float": schema.BoolAttribute{ + Optional: true, + Description: `Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended.`, + }, + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "parquet", + ), + }, + Description: `must be one of ["parquet"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_avro_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "double_as_string": schema.BoolAttribute{ + Optional: true, + Description: `Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers.`, + }, + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "avro", + ), + }, + Description: `must be one of ["avro"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_csv_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "delimiter": schema.StringAttribute{ + Optional: true, + Description: `The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'.`, + }, + "double_quote": schema.BoolAttribute{ + Optional: true, + Description: `Whether two quotes in a quoted CSV value denote a single quote in the data.`, + }, + "encoding": schema.StringAttribute{ + Optional: true, + Description: `The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options.`, + }, + "escape_char": schema.StringAttribute{ + Optional: true, + Description: `The character used for escaping special characters. 
To disallow escaping, leave this field blank.`, + }, + "false_values": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as false values.`, + }, + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "csv", + ), + }, + Description: `must be one of ["csv"]`, + }, + "header_definition": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "From CSV", + ), + }, + Description: `must be one of ["From CSV"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "header_definition_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Autogenerated", + ), + }, + Description: `must be one of ["Autogenerated"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "column_names": schema.ListAttribute{ + Required: true, + ElementType: types.StringType, + Description: `The column names that will be used while emitting the CSV records`, + }, + "header_definition_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "User Provided", + ), + }, + Description: `must be one of ["User Provided"]`, + }, + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. 
If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `How headers will be defined. ` + "`" + `User Provided` + "`" + ` assumes the CSV does not have a header row and uses the headers provided and ` + "`" + `Autogenerated` + "`" + ` assumes the CSV does not have a header row and the CDK will generate headers using for ` + "`" + `f{i}` + "`" + ` where ` + "`" + `i` + "`" + ` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows.`, + }, + "inference_type": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "None", + "Primitive Types Only", + ), + }, + MarkdownDescription: `must be one of ["None", "Primitive Types Only"]` + "\n" + + `How to infer the types of the columns. If none, inference default to strings.`, + }, + "null_values": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field.`, + }, + "quote_char": schema.StringAttribute{ + Optional: true, + Description: `The character used for quoting CSV values. To disallow quoting, make this field blank.`, + }, + "skip_rows_after_header": schema.Int64Attribute{ + Optional: true, + Description: `The number of rows to skip after the header row.`, + }, + "skip_rows_before_header": schema.Int64Attribute{ + Optional: true, + Description: `The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field.`, + }, + "strings_can_be_null": schema.BoolAttribute{ + Optional: true, + Description: `Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself.`, + }, + "true_values": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `A set of case-sensitive strings that should be interpreted as true values.`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_jsonl_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "jsonl", + ), + }, + Description: `must be one of ["jsonl"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "source_s3_update_file_based_stream_config_format_parquet_format": schema.SingleNestedAttribute{ + Optional: true, + Attributes: map[string]schema.Attribute{ + "decimal_as_float": schema.BoolAttribute{ + Optional: true, + Description: `Whether to convert decimal fields to floats. 
There is a loss of precision when converting decimals to floats, so this is not recommended.`, + }, + "filetype": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "parquet", + ), + }, + Description: `must be one of ["parquet"]`, + }, + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + }, + Validators: []validator.Object{ + validators.ExactlyOneChild(), + }, + Description: `The configuration options that are used to alter how to read incoming files that deviate from the standard formatting.`, + }, + "globs": schema.ListAttribute{ + Optional: true, + ElementType: types.StringType, + Description: `The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here.`, + }, + "input_schema": schema.StringAttribute{ + Optional: true, + Description: `The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files.`, + }, + "legacy_prefix": schema.StringAttribute{ + Optional: true, + Description: `The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob.`, + }, + "name": schema.StringAttribute{ + Required: true, + Description: `The name of the stream.`, + }, + "primary_key": schema.StringAttribute{ + Optional: true, + Description: `The column or columns (for a composite key) that serves as the unique identifier of a record.`, + }, + "schemaless": schema.BoolAttribute{ + Optional: true, + Description: `When enabled, syncs will not validate or structure records against the stream's schema.`, + }, + "validation_policy": schema.StringAttribute{ + Optional: true, + Validators: []validator.String{ + stringvalidator.OneOf( + "Emit Record", + "Skip Record", + "Wait for Discover", + ), + }, + MarkdownDescription: `must be one of ["Emit Record", "Skip Record", "Wait for Discover"]` + "\n" + + `The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema.`, + }, + }, + }, + Description: `Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. 
When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table.`, + }, }, + MarkdownDescription: `NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes` + "\n" + + `because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK.`, }, "name": schema.StringAttribute{ PlanModifiers: []planmodifier.String{ diff --git a/internal/provider/source_s3_resource_sdk.go b/internal/provider/source_s3_resource_sdk.go index 3d4496260..b4f6ab052 100755 --- a/internal/provider/source_s3_resource_sdk.go +++ b/internal/provider/source_s3_resource_sdk.go @@ -9,7 +9,31 @@ import ( ) func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest { - dataset := r.Configuration.Dataset.ValueString() + awsAccessKeyID := new(string) + if !r.Configuration.AwsAccessKeyID.IsUnknown() && !r.Configuration.AwsAccessKeyID.IsNull() { + *awsAccessKeyID = r.Configuration.AwsAccessKeyID.ValueString() + } else { + awsAccessKeyID = nil + } + awsSecretAccessKey := new(string) + if !r.Configuration.AwsSecretAccessKey.IsUnknown() && !r.Configuration.AwsSecretAccessKey.IsNull() { + *awsSecretAccessKey = r.Configuration.AwsSecretAccessKey.ValueString() + } else { + awsSecretAccessKey = nil + } + bucket := r.Configuration.Bucket.ValueString() + dataset := new(string) + if !r.Configuration.Dataset.IsUnknown() && !r.Configuration.Dataset.IsNull() { + *dataset = r.Configuration.Dataset.ValueString() + } else { + dataset = nil + } + endpoint := new(string) + if !r.Configuration.Endpoint.IsUnknown() && !r.Configuration.Endpoint.IsNull() { + *endpoint = r.Configuration.Endpoint.ValueString() + } else { + endpoint = nil + } var format *shared.SourceS3FileFormat if r.Configuration.Format != nil { var sourceS3FileFormatCSV *shared.SourceS3FileFormatCSV @@ -191,62 +215,367 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest } } } - pathPattern := r.Configuration.PathPattern.ValueString() - awsAccessKeyID := new(string) - if !r.Configuration.Provider.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.AwsAccessKeyID.IsNull() { - *awsAccessKeyID = r.Configuration.Provider.AwsAccessKeyID.ValueString() + pathPattern := new(string) + if !r.Configuration.PathPattern.IsUnknown() && !r.Configuration.PathPattern.IsNull() { + *pathPattern = r.Configuration.PathPattern.ValueString() } else { - awsAccessKeyID = nil + pathPattern = nil } - awsSecretAccessKey := new(string) - if !r.Configuration.Provider.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.AwsSecretAccessKey.IsNull() { - *awsSecretAccessKey = r.Configuration.Provider.AwsSecretAccessKey.ValueString() - } else { - awsSecretAccessKey = nil + var provider *shared.SourceS3S3AmazonWebServices + if r.Configuration.Provider != nil { + awsAccessKeyId1 := new(string) + if !r.Configuration.Provider.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.AwsAccessKeyID.IsNull() { + *awsAccessKeyId1 = r.Configuration.Provider.AwsAccessKeyID.ValueString() + } else { + awsAccessKeyId1 = nil + } + awsSecretAccessKey1 := new(string) + if !r.Configuration.Provider.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.AwsSecretAccessKey.IsNull() { + *awsSecretAccessKey1 = r.Configuration.Provider.AwsSecretAccessKey.ValueString() + } else { + awsSecretAccessKey1 = nil + } + bucket1 := new(string) + if !r.Configuration.Provider.Bucket.IsUnknown() && !r.Configuration.Provider.Bucket.IsNull() { + *bucket1 = 
r.Configuration.Provider.Bucket.ValueString() + } else { + bucket1 = nil + } + endpoint1 := new(string) + if !r.Configuration.Provider.Endpoint.IsUnknown() && !r.Configuration.Provider.Endpoint.IsNull() { + *endpoint1 = r.Configuration.Provider.Endpoint.ValueString() + } else { + endpoint1 = nil + } + pathPrefix := new(string) + if !r.Configuration.Provider.PathPrefix.IsUnknown() && !r.Configuration.Provider.PathPrefix.IsNull() { + *pathPrefix = r.Configuration.Provider.PathPrefix.ValueString() + } else { + pathPrefix = nil + } + startDate := new(time.Time) + if !r.Configuration.Provider.StartDate.IsUnknown() && !r.Configuration.Provider.StartDate.IsNull() { + *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Provider.StartDate.ValueString()) + } else { + startDate = nil + } + provider = &shared.SourceS3S3AmazonWebServices{ + AwsAccessKeyID: awsAccessKeyId1, + AwsSecretAccessKey: awsSecretAccessKey1, + Bucket: bucket1, + Endpoint: endpoint1, + PathPrefix: pathPrefix, + StartDate: startDate, + } } - bucket := r.Configuration.Provider.Bucket.ValueString() - endpoint := new(string) - if !r.Configuration.Provider.Endpoint.IsUnknown() && !r.Configuration.Provider.Endpoint.IsNull() { - *endpoint = r.Configuration.Provider.Endpoint.ValueString() + schema := new(string) + if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() { + *schema = r.Configuration.Schema.ValueString() } else { - endpoint = nil + schema = nil } - pathPrefix := new(string) - if !r.Configuration.Provider.PathPrefix.IsUnknown() && !r.Configuration.Provider.PathPrefix.IsNull() { - *pathPrefix = r.Configuration.Provider.PathPrefix.ValueString() + sourceType := shared.SourceS3S3(r.Configuration.SourceType.ValueString()) + startDate1 := new(time.Time) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate1, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) } else { - pathPrefix = nil + startDate1 = nil } - startDate := new(time.Time) - if !r.Configuration.Provider.StartDate.IsUnknown() && !r.Configuration.Provider.StartDate.IsNull() { - *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Provider.StartDate.ValueString()) - } else { - startDate = nil + var streams []shared.SourceS3FileBasedStreamConfig = nil + for _, streamsItem := range r.Configuration.Streams { + daysToSyncIfHistoryIsFull := new(int64) + if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() { + *daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64() + } else { + daysToSyncIfHistoryIsFull = nil + } + fileType := streamsItem.FileType.ValueString() + var format1 *shared.SourceS3FileBasedStreamConfigFormat + if streamsItem.Format != nil { + var sourceS3FileBasedStreamConfigFormatAvroFormat *shared.SourceS3FileBasedStreamConfigFormatAvroFormat + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat != nil { + doubleAsString := new(bool) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsNull() { + *doubleAsString = streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.DoubleAsString.ValueBool() + } else { + doubleAsString = nil + } + filetype4 := new(shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.Filetype.IsUnknown() && 
!streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.Filetype.IsNull() { + *filetype4 = shared.SourceS3FileBasedStreamConfigFormatAvroFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatAvroFormat.Filetype.ValueString()) + } else { + filetype4 = nil + } + sourceS3FileBasedStreamConfigFormatAvroFormat = &shared.SourceS3FileBasedStreamConfigFormatAvroFormat{ + DoubleAsString: doubleAsString, + Filetype: filetype4, + } + } + if sourceS3FileBasedStreamConfigFormatAvroFormat != nil { + format1 = &shared.SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatAvroFormat: sourceS3FileBasedStreamConfigFormatAvroFormat, + } + } + var sourceS3FileBasedStreamConfigFormatCSVFormat *shared.SourceS3FileBasedStreamConfigFormatCSVFormat + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat != nil { + delimiter1 := new(string) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Delimiter.IsNull() { + *delimiter1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Delimiter.ValueString() + } else { + delimiter1 = nil + } + doubleQuote1 := new(bool) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsNull() { + *doubleQuote1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.DoubleQuote.ValueBool() + } else { + doubleQuote1 = nil + } + encoding1 := new(string) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Encoding.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Encoding.IsNull() { + *encoding1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Encoding.ValueString() + } else { + encoding1 = nil + } + escapeChar1 := new(string) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.EscapeChar.IsNull() { + *escapeChar1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.EscapeChar.ValueString() + } else { + escapeChar1 = nil + } + var falseValues []string = nil + for _, falseValuesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.FalseValues { + falseValues = append(falseValues, falseValuesItem.ValueString()) + } + filetype5 := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Filetype.IsNull() { + *filetype5 = shared.SourceS3FileBasedStreamConfigFormatCSVFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.Filetype.ValueString()) + } else { + filetype5 = nil + } + var headerDefinition *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition != nil { + var sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil { + headerDefinitionType := 
new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsNull() { + *headerDefinitionType = shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.ValueString()) + } else { + headerDefinitionType = nil + } + sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV{ + HeaderDefinitionType: headerDefinitionType, + } + } + if sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil { + headerDefinition = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV, + } + } + var sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil { + headerDefinitionType1 := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsNull() { + *headerDefinitionType1 = shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.ValueString()) + } else { + headerDefinitionType1 = nil + } + sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated{ + HeaderDefinitionType: headerDefinitionType1, + } + } + if sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil { + headerDefinition = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated, + } + } + var sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil { + var columnNames []string 
= nil + for _, columnNamesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.ColumnNames { + columnNames = append(columnNames, columnNamesItem.ValueString()) + } + headerDefinitionType2 := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsNull() { + *headerDefinitionType2 = shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.ValueString()) + } else { + headerDefinitionType2 = nil + } + sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided{ + ColumnNames: columnNames, + HeaderDefinitionType: headerDefinitionType2, + } + } + if sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil { + headerDefinition = &shared.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided, + } + } + } + inferenceType := new(shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.InferenceType.IsNull() { + *inferenceType = shared.SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType(streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.InferenceType.ValueString()) + } else { + inferenceType = nil + } + var nullValues []string = nil + for _, nullValuesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.NullValues { + nullValues = append(nullValues, nullValuesItem.ValueString()) + } + quoteChar1 := new(string) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.QuoteChar.IsNull() { + *quoteChar1 = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.QuoteChar.ValueString() + } else { + quoteChar1 = nil + } + skipRowsAfterHeader := new(int64) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsNull() { + *skipRowsAfterHeader = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.ValueInt64() + } else { + skipRowsAfterHeader = nil + } + skipRowsBeforeHeader := new(int64) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsNull() { + *skipRowsBeforeHeader = 
streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.ValueInt64() + } else { + skipRowsBeforeHeader = nil + } + stringsCanBeNull := new(bool) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsNull() { + *stringsCanBeNull = streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.ValueBool() + } else { + stringsCanBeNull = nil + } + var trueValues []string = nil + for _, trueValuesItem := range streamsItem.Format.SourceS3FileBasedStreamConfigFormatCSVFormat.TrueValues { + trueValues = append(trueValues, trueValuesItem.ValueString()) + } + sourceS3FileBasedStreamConfigFormatCSVFormat = &shared.SourceS3FileBasedStreamConfigFormatCSVFormat{ + Delimiter: delimiter1, + DoubleQuote: doubleQuote1, + Encoding: encoding1, + EscapeChar: escapeChar1, + FalseValues: falseValues, + Filetype: filetype5, + HeaderDefinition: headerDefinition, + InferenceType: inferenceType, + NullValues: nullValues, + QuoteChar: quoteChar1, + SkipRowsAfterHeader: skipRowsAfterHeader, + SkipRowsBeforeHeader: skipRowsBeforeHeader, + StringsCanBeNull: stringsCanBeNull, + TrueValues: trueValues, + } + } + if sourceS3FileBasedStreamConfigFormatCSVFormat != nil { + format1 = &shared.SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatCSVFormat: sourceS3FileBasedStreamConfigFormatCSVFormat, + } + } + var sourceS3FileBasedStreamConfigFormatJsonlFormat *shared.SourceS3FileBasedStreamConfigFormatJsonlFormat + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat != nil { + filetype6 := new(shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat.Filetype.IsNull() { + *filetype6 = shared.SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatJsonlFormat.Filetype.ValueString()) + } else { + filetype6 = nil + } + sourceS3FileBasedStreamConfigFormatJsonlFormat = &shared.SourceS3FileBasedStreamConfigFormatJsonlFormat{ + Filetype: filetype6, + } + } + if sourceS3FileBasedStreamConfigFormatJsonlFormat != nil { + format1 = &shared.SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatJsonlFormat: sourceS3FileBasedStreamConfigFormatJsonlFormat, + } + } + var sourceS3FileBasedStreamConfigFormatParquetFormat *shared.SourceS3FileBasedStreamConfigFormatParquetFormat + if streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat != nil { + decimalAsFloat := new(bool) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsNull() { + *decimalAsFloat = streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.ValueBool() + } else { + decimalAsFloat = nil + } + filetype7 := new(shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype) + if !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.Filetype.IsNull() { + *filetype7 = shared.SourceS3FileBasedStreamConfigFormatParquetFormatFiletype(streamsItem.Format.SourceS3FileBasedStreamConfigFormatParquetFormat.Filetype.ValueString()) + } else { + filetype7 = nil 
+ } + sourceS3FileBasedStreamConfigFormatParquetFormat = &shared.SourceS3FileBasedStreamConfigFormatParquetFormat{ + DecimalAsFloat: decimalAsFloat, + Filetype: filetype7, + } + } + if sourceS3FileBasedStreamConfigFormatParquetFormat != nil { + format1 = &shared.SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatParquetFormat: sourceS3FileBasedStreamConfigFormatParquetFormat, + } + } + } + var globs []string = nil + for _, globsItem := range streamsItem.Globs { + globs = append(globs, globsItem.ValueString()) + } + inputSchema := new(string) + if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() { + *inputSchema = streamsItem.InputSchema.ValueString() + } else { + inputSchema = nil + } + legacyPrefix := new(string) + if !streamsItem.LegacyPrefix.IsUnknown() && !streamsItem.LegacyPrefix.IsNull() { + *legacyPrefix = streamsItem.LegacyPrefix.ValueString() + } else { + legacyPrefix = nil + } + name := streamsItem.Name.ValueString() + primaryKey := new(string) + if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() { + *primaryKey = streamsItem.PrimaryKey.ValueString() + } else { + primaryKey = nil + } + schemaless := new(bool) + if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() { + *schemaless = streamsItem.Schemaless.ValueBool() + } else { + schemaless = nil + } + validationPolicy := new(shared.SourceS3FileBasedStreamConfigValidationPolicy) + if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() { + *validationPolicy = shared.SourceS3FileBasedStreamConfigValidationPolicy(streamsItem.ValidationPolicy.ValueString()) + } else { + validationPolicy = nil + } + streams = append(streams, shared.SourceS3FileBasedStreamConfig{ + DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull, + FileType: fileType, + Format: format1, + Globs: globs, + InputSchema: inputSchema, + LegacyPrefix: legacyPrefix, + Name: name, + PrimaryKey: primaryKey, + Schemaless: schemaless, + ValidationPolicy: validationPolicy, + }) } - provider := shared.SourceS3S3AmazonWebServices{ + configuration := shared.SourceS3{ AwsAccessKeyID: awsAccessKeyID, AwsSecretAccessKey: awsSecretAccessKey, Bucket: bucket, + Dataset: dataset, Endpoint: endpoint, - PathPrefix: pathPrefix, - StartDate: startDate, - } - schema := new(string) - if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() { - *schema = r.Configuration.Schema.ValueString() - } else { - schema = nil - } - sourceType := shared.SourceS3S3(r.Configuration.SourceType.ValueString()) - configuration := shared.SourceS3{ - Dataset: dataset, - Format: format, - PathPattern: pathPattern, - Provider: provider, - Schema: schema, - SourceType: sourceType, + Format: format, + PathPattern: pathPattern, + Provider: provider, + Schema: schema, + SourceType: sourceType, + StartDate: startDate1, + Streams: streams, } - name := r.Name.ValueString() + name1 := r.Name.ValueString() secretID := new(string) if !r.SecretID.IsUnknown() && !r.SecretID.IsNull() { *secretID = r.SecretID.ValueString() @@ -256,7 +585,7 @@ func (r *SourceS3ResourceModel) ToCreateSDKType() *shared.SourceS3CreateRequest workspaceID := r.WorkspaceID.ValueString() out := shared.SourceS3CreateRequest{ Configuration: configuration, - Name: name, + Name: name1, SecretID: secretID, WorkspaceID: workspaceID, } @@ -269,7 +598,31 @@ func (r *SourceS3ResourceModel) ToGetSDKType() *shared.SourceS3CreateRequest { } func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest { - 
dataset := r.Configuration.Dataset.ValueString() + awsAccessKeyID := new(string) + if !r.Configuration.AwsAccessKeyID.IsUnknown() && !r.Configuration.AwsAccessKeyID.IsNull() { + *awsAccessKeyID = r.Configuration.AwsAccessKeyID.ValueString() + } else { + awsAccessKeyID = nil + } + awsSecretAccessKey := new(string) + if !r.Configuration.AwsSecretAccessKey.IsUnknown() && !r.Configuration.AwsSecretAccessKey.IsNull() { + *awsSecretAccessKey = r.Configuration.AwsSecretAccessKey.ValueString() + } else { + awsSecretAccessKey = nil + } + bucket := r.Configuration.Bucket.ValueString() + dataset := new(string) + if !r.Configuration.Dataset.IsUnknown() && !r.Configuration.Dataset.IsNull() { + *dataset = r.Configuration.Dataset.ValueString() + } else { + dataset = nil + } + endpoint := new(string) + if !r.Configuration.Endpoint.IsUnknown() && !r.Configuration.Endpoint.IsNull() { + *endpoint = r.Configuration.Endpoint.ValueString() + } else { + endpoint = nil + } var format *shared.SourceS3UpdateFileFormat if r.Configuration.Format != nil { var sourceS3UpdateFileFormatCSV *shared.SourceS3UpdateFileFormatCSV @@ -451,64 +804,369 @@ func (r *SourceS3ResourceModel) ToUpdateSDKType() *shared.SourceS3PutRequest { } } } - pathPattern := r.Configuration.PathPattern.ValueString() - awsAccessKeyID := new(string) - if !r.Configuration.Provider.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.AwsAccessKeyID.IsNull() { - *awsAccessKeyID = r.Configuration.Provider.AwsAccessKeyID.ValueString() + pathPattern := new(string) + if !r.Configuration.PathPattern.IsUnknown() && !r.Configuration.PathPattern.IsNull() { + *pathPattern = r.Configuration.PathPattern.ValueString() } else { - awsAccessKeyID = nil + pathPattern = nil } - awsSecretAccessKey := new(string) - if !r.Configuration.Provider.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.AwsSecretAccessKey.IsNull() { - *awsSecretAccessKey = r.Configuration.Provider.AwsSecretAccessKey.ValueString() - } else { - awsSecretAccessKey = nil + var provider *shared.SourceS3UpdateS3AmazonWebServices + if r.Configuration.Provider != nil { + awsAccessKeyId1 := new(string) + if !r.Configuration.Provider.AwsAccessKeyID.IsUnknown() && !r.Configuration.Provider.AwsAccessKeyID.IsNull() { + *awsAccessKeyId1 = r.Configuration.Provider.AwsAccessKeyID.ValueString() + } else { + awsAccessKeyId1 = nil + } + awsSecretAccessKey1 := new(string) + if !r.Configuration.Provider.AwsSecretAccessKey.IsUnknown() && !r.Configuration.Provider.AwsSecretAccessKey.IsNull() { + *awsSecretAccessKey1 = r.Configuration.Provider.AwsSecretAccessKey.ValueString() + } else { + awsSecretAccessKey1 = nil + } + bucket1 := new(string) + if !r.Configuration.Provider.Bucket.IsUnknown() && !r.Configuration.Provider.Bucket.IsNull() { + *bucket1 = r.Configuration.Provider.Bucket.ValueString() + } else { + bucket1 = nil + } + endpoint1 := new(string) + if !r.Configuration.Provider.Endpoint.IsUnknown() && !r.Configuration.Provider.Endpoint.IsNull() { + *endpoint1 = r.Configuration.Provider.Endpoint.ValueString() + } else { + endpoint1 = nil + } + pathPrefix := new(string) + if !r.Configuration.Provider.PathPrefix.IsUnknown() && !r.Configuration.Provider.PathPrefix.IsNull() { + *pathPrefix = r.Configuration.Provider.PathPrefix.ValueString() + } else { + pathPrefix = nil + } + startDate := new(time.Time) + if !r.Configuration.Provider.StartDate.IsUnknown() && !r.Configuration.Provider.StartDate.IsNull() { + *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Provider.StartDate.ValueString()) + } else { 
+ startDate = nil + } + provider = &shared.SourceS3UpdateS3AmazonWebServices{ + AwsAccessKeyID: awsAccessKeyId1, + AwsSecretAccessKey: awsSecretAccessKey1, + Bucket: bucket1, + Endpoint: endpoint1, + PathPrefix: pathPrefix, + StartDate: startDate, + } } - bucket := r.Configuration.Provider.Bucket.ValueString() - endpoint := new(string) - if !r.Configuration.Provider.Endpoint.IsUnknown() && !r.Configuration.Provider.Endpoint.IsNull() { - *endpoint = r.Configuration.Provider.Endpoint.ValueString() + schema := new(string) + if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() { + *schema = r.Configuration.Schema.ValueString() } else { - endpoint = nil + schema = nil } - pathPrefix := new(string) - if !r.Configuration.Provider.PathPrefix.IsUnknown() && !r.Configuration.Provider.PathPrefix.IsNull() { - *pathPrefix = r.Configuration.Provider.PathPrefix.ValueString() + startDate1 := new(time.Time) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate1, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) } else { - pathPrefix = nil + startDate1 = nil } - startDate := new(time.Time) - if !r.Configuration.Provider.StartDate.IsUnknown() && !r.Configuration.Provider.StartDate.IsNull() { - *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.Provider.StartDate.ValueString()) - } else { - startDate = nil + var streams []shared.SourceS3UpdateFileBasedStreamConfig = nil + for _, streamsItem := range r.Configuration.Streams { + daysToSyncIfHistoryIsFull := new(int64) + if !streamsItem.DaysToSyncIfHistoryIsFull.IsUnknown() && !streamsItem.DaysToSyncIfHistoryIsFull.IsNull() { + *daysToSyncIfHistoryIsFull = streamsItem.DaysToSyncIfHistoryIsFull.ValueInt64() + } else { + daysToSyncIfHistoryIsFull = nil + } + fileType := streamsItem.FileType.ValueString() + var format1 *shared.SourceS3UpdateFileBasedStreamConfigFormat + if streamsItem.Format != nil { + var sourceS3UpdateFileBasedStreamConfigFormatAvroFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat != nil { + doubleAsString := new(bool) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.DoubleAsString.IsNull() { + *doubleAsString = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.DoubleAsString.ValueBool() + } else { + doubleAsString = nil + } + filetype4 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.Filetype.IsNull() { + *filetype4 = shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat.Filetype.ValueString()) + } else { + filetype4 = nil + } + sourceS3UpdateFileBasedStreamConfigFormatAvroFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat{ + DoubleAsString: doubleAsString, + Filetype: filetype4, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatAvroFormat != nil { + format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatAvroFormat: sourceS3UpdateFileBasedStreamConfigFormatAvroFormat, + } + } + var sourceS3UpdateFileBasedStreamConfigFormatCSVFormat 
*shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat != nil { + delimiter1 := new(string) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Delimiter.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Delimiter.IsNull() { + *delimiter1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Delimiter.ValueString() + } else { + delimiter1 = nil + } + doubleQuote1 := new(bool) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.DoubleQuote.IsNull() { + *doubleQuote1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.DoubleQuote.ValueBool() + } else { + doubleQuote1 = nil + } + encoding1 := new(string) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Encoding.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Encoding.IsNull() { + *encoding1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Encoding.ValueString() + } else { + encoding1 = nil + } + escapeChar1 := new(string) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.EscapeChar.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.EscapeChar.IsNull() { + *escapeChar1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.EscapeChar.ValueString() + } else { + escapeChar1 = nil + } + var falseValues []string = nil + for _, falseValuesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.FalseValues { + falseValues = append(falseValues, falseValuesItem.ValueString()) + } + filetype5 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Filetype.IsNull() { + *filetype5 = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.Filetype.ValueString()) + } else { + filetype5 = nil + } + var headerDefinition *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition != nil { + var sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil { + headerDefinitionType := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.IsNull() { + *headerDefinitionType = 
shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV.HeaderDefinitionType.ValueString()) + } else { + headerDefinitionType = nil + } + sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV{ + HeaderDefinitionType: headerDefinitionType, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil { + headerDefinition = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV, + } + } + var sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil { + headerDefinitionType1 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.IsNull() { + *headerDefinitionType1 = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated.HeaderDefinitionType.ValueString()) + } else { + headerDefinitionType1 = nil + } + sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated{ + HeaderDefinitionType: headerDefinitionType1, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil { + headerDefinition = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated, + } + } + var sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil { + var columnNames []string = nil + for _, columnNamesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.ColumnNames { + columnNames = append(columnNames, columnNamesItem.ValueString()) + } + headerDefinitionType2 := 
new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.IsNull() { + *headerDefinitionType2 = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.HeaderDefinition.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided.HeaderDefinitionType.ValueString()) + } else { + headerDefinitionType2 = nil + } + sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided{ + ColumnNames: columnNames, + HeaderDefinitionType: headerDefinitionType2, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil { + headerDefinition = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided, + } + } + } + inferenceType := new(shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.InferenceType.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.InferenceType.IsNull() { + *inferenceType = shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.InferenceType.ValueString()) + } else { + inferenceType = nil + } + var nullValues []string = nil + for _, nullValuesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.NullValues { + nullValues = append(nullValues, nullValuesItem.ValueString()) + } + quoteChar1 := new(string) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.QuoteChar.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.QuoteChar.IsNull() { + *quoteChar1 = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.QuoteChar.ValueString() + } else { + quoteChar1 = nil + } + skipRowsAfterHeader := new(int64) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.IsNull() { + *skipRowsAfterHeader = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsAfterHeader.ValueInt64() + } else { + skipRowsAfterHeader = nil + } + skipRowsBeforeHeader := new(int64) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.IsNull() { + *skipRowsBeforeHeader = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.SkipRowsBeforeHeader.ValueInt64() + } else { + skipRowsBeforeHeader = nil + } + stringsCanBeNull := new(bool) + if 
!streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.IsNull() { + *stringsCanBeNull = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.StringsCanBeNull.ValueBool() + } else { + stringsCanBeNull = nil + } + var trueValues []string = nil + for _, trueValuesItem := range streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat.TrueValues { + trueValues = append(trueValues, trueValuesItem.ValueString()) + } + sourceS3UpdateFileBasedStreamConfigFormatCSVFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat{ + Delimiter: delimiter1, + DoubleQuote: doubleQuote1, + Encoding: encoding1, + EscapeChar: escapeChar1, + FalseValues: falseValues, + Filetype: filetype5, + HeaderDefinition: headerDefinition, + InferenceType: inferenceType, + NullValues: nullValues, + QuoteChar: quoteChar1, + SkipRowsAfterHeader: skipRowsAfterHeader, + SkipRowsBeforeHeader: skipRowsBeforeHeader, + StringsCanBeNull: stringsCanBeNull, + TrueValues: trueValues, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatCSVFormat != nil { + format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormat: sourceS3UpdateFileBasedStreamConfigFormatCSVFormat, + } + } + var sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat != nil { + filetype6 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat.Filetype.IsNull() { + *filetype6 = shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat.Filetype.ValueString()) + } else { + filetype6 = nil + } + sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat{ + Filetype: filetype6, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat != nil { + format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat: sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat, + } + } + var sourceS3UpdateFileBasedStreamConfigFormatParquetFormat *shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat + if streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat != nil { + decimalAsFloat := new(bool) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.IsNull() { + *decimalAsFloat = streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.DecimalAsFloat.ValueBool() + } else { + decimalAsFloat = nil + } + filetype7 := new(shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype) + if !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.Filetype.IsUnknown() && !streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.Filetype.IsNull() { + *filetype7 = 
shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype(streamsItem.Format.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat.Filetype.ValueString()) + } else { + filetype7 = nil + } + sourceS3UpdateFileBasedStreamConfigFormatParquetFormat = &shared.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat{ + DecimalAsFloat: decimalAsFloat, + Filetype: filetype7, + } + } + if sourceS3UpdateFileBasedStreamConfigFormatParquetFormat != nil { + format1 = &shared.SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatParquetFormat: sourceS3UpdateFileBasedStreamConfigFormatParquetFormat, + } + } + } + var globs []string = nil + for _, globsItem := range streamsItem.Globs { + globs = append(globs, globsItem.ValueString()) + } + inputSchema := new(string) + if !streamsItem.InputSchema.IsUnknown() && !streamsItem.InputSchema.IsNull() { + *inputSchema = streamsItem.InputSchema.ValueString() + } else { + inputSchema = nil + } + legacyPrefix := new(string) + if !streamsItem.LegacyPrefix.IsUnknown() && !streamsItem.LegacyPrefix.IsNull() { + *legacyPrefix = streamsItem.LegacyPrefix.ValueString() + } else { + legacyPrefix = nil + } + name := streamsItem.Name.ValueString() + primaryKey := new(string) + if !streamsItem.PrimaryKey.IsUnknown() && !streamsItem.PrimaryKey.IsNull() { + *primaryKey = streamsItem.PrimaryKey.ValueString() + } else { + primaryKey = nil + } + schemaless := new(bool) + if !streamsItem.Schemaless.IsUnknown() && !streamsItem.Schemaless.IsNull() { + *schemaless = streamsItem.Schemaless.ValueBool() + } else { + schemaless = nil + } + validationPolicy := new(shared.SourceS3UpdateFileBasedStreamConfigValidationPolicy) + if !streamsItem.ValidationPolicy.IsUnknown() && !streamsItem.ValidationPolicy.IsNull() { + *validationPolicy = shared.SourceS3UpdateFileBasedStreamConfigValidationPolicy(streamsItem.ValidationPolicy.ValueString()) + } else { + validationPolicy = nil + } + streams = append(streams, shared.SourceS3UpdateFileBasedStreamConfig{ + DaysToSyncIfHistoryIsFull: daysToSyncIfHistoryIsFull, + FileType: fileType, + Format: format1, + Globs: globs, + InputSchema: inputSchema, + LegacyPrefix: legacyPrefix, + Name: name, + PrimaryKey: primaryKey, + Schemaless: schemaless, + ValidationPolicy: validationPolicy, + }) } - provider := shared.SourceS3UpdateS3AmazonWebServices{ + configuration := shared.SourceS3Update{ AwsAccessKeyID: awsAccessKeyID, AwsSecretAccessKey: awsSecretAccessKey, Bucket: bucket, + Dataset: dataset, Endpoint: endpoint, - PathPrefix: pathPrefix, - StartDate: startDate, - } - schema := new(string) - if !r.Configuration.Schema.IsUnknown() && !r.Configuration.Schema.IsNull() { - *schema = r.Configuration.Schema.ValueString() - } else { - schema = nil - } - configuration := shared.SourceS3Update{ - Dataset: dataset, - Format: format, - PathPattern: pathPattern, - Provider: provider, - Schema: schema, + Format: format, + PathPattern: pathPattern, + Provider: provider, + Schema: schema, + StartDate: startDate1, + Streams: streams, } - name := r.Name.ValueString() + name1 := r.Name.ValueString() workspaceID := r.WorkspaceID.ValueString() out := shared.SourceS3PutRequest{ Configuration: configuration, - Name: name, + Name: name1, WorkspaceID: workspaceID, } return &out diff --git a/internal/provider/source_shopify_resource.go b/internal/provider/source_shopify_resource.go index 054a5b043..fe63a8404 100755 --- a/internal/provider/source_shopify_resource.go +++ b/internal/provider/source_shopify_resource.go @@ -170,7 +170,7 @@ func (r 
*SourceShopifyResource) Schema(ctx context.Context, req resource.SchemaR Description: `must be one of ["shopify"]`, }, "start_date": schema.StringAttribute{ - Required: true, + Optional: true, Validators: []validator.String{ validators.IsValidDate(), }, diff --git a/internal/provider/source_shopify_resource_sdk.go b/internal/provider/source_shopify_resource_sdk.go index 3f687ac5a..207b452f8 100755 --- a/internal/provider/source_shopify_resource_sdk.go +++ b/internal/provider/source_shopify_resource_sdk.go @@ -11,20 +11,6 @@ import ( func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCreateRequest { var credentials *shared.SourceShopifyShopifyAuthorizationMethod if r.Configuration.Credentials != nil { - var sourceShopifyShopifyAuthorizationMethodAPIPassword *shared.SourceShopifyShopifyAuthorizationMethodAPIPassword - if r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword != nil { - apiPassword := r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword.APIPassword.ValueString() - authMethod := shared.SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod(r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword.AuthMethod.ValueString()) - sourceShopifyShopifyAuthorizationMethodAPIPassword = &shared.SourceShopifyShopifyAuthorizationMethodAPIPassword{ - APIPassword: apiPassword, - AuthMethod: authMethod, - } - } - if sourceShopifyShopifyAuthorizationMethodAPIPassword != nil { - credentials = &shared.SourceShopifyShopifyAuthorizationMethod{ - SourceShopifyShopifyAuthorizationMethodAPIPassword: sourceShopifyShopifyAuthorizationMethodAPIPassword, - } - } var sourceShopifyShopifyAuthorizationMethodOAuth20 *shared.SourceShopifyShopifyAuthorizationMethodOAuth20 if r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20 != nil { accessToken := new(string) @@ -33,7 +19,7 @@ func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCrea } else { accessToken = nil } - authMethod1 := shared.SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.AuthMethod.ValueString()) + authMethod := shared.SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.AuthMethod.ValueString()) clientID := new(string) if !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientID.IsNull() { *clientID = r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodOAuth20.ClientID.ValueString() @@ -48,7 +34,7 @@ func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCrea } sourceShopifyShopifyAuthorizationMethodOAuth20 = &shared.SourceShopifyShopifyAuthorizationMethodOAuth20{ AccessToken: accessToken, - AuthMethod: authMethod1, + AuthMethod: authMethod, ClientID: clientID, ClientSecret: clientSecret, } @@ -58,10 +44,29 @@ func (r *SourceShopifyResourceModel) ToCreateSDKType() *shared.SourceShopifyCrea SourceShopifyShopifyAuthorizationMethodOAuth20: sourceShopifyShopifyAuthorizationMethodOAuth20, } } + var sourceShopifyShopifyAuthorizationMethodAPIPassword *shared.SourceShopifyShopifyAuthorizationMethodAPIPassword + if r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword != nil { + apiPassword := 
r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword.APIPassword.ValueString() + authMethod1 := shared.SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod(r.Configuration.Credentials.SourceShopifyShopifyAuthorizationMethodAPIPassword.AuthMethod.ValueString()) + sourceShopifyShopifyAuthorizationMethodAPIPassword = &shared.SourceShopifyShopifyAuthorizationMethodAPIPassword{ + APIPassword: apiPassword, + AuthMethod: authMethod1, + } + } + if sourceShopifyShopifyAuthorizationMethodAPIPassword != nil { + credentials = &shared.SourceShopifyShopifyAuthorizationMethod{ + SourceShopifyShopifyAuthorizationMethodAPIPassword: sourceShopifyShopifyAuthorizationMethodAPIPassword, + } + } } shop := r.Configuration.Shop.ValueString() sourceType := shared.SourceShopifyShopify(r.Configuration.SourceType.ValueString()) - startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString()) + startDate := new(customTypes.Date) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceShopify{ Credentials: credentials, Shop: shop, @@ -93,20 +98,6 @@ func (r *SourceShopifyResourceModel) ToGetSDKType() *shared.SourceShopifyCreateR func (r *SourceShopifyResourceModel) ToUpdateSDKType() *shared.SourceShopifyPutRequest { var credentials *shared.SourceShopifyUpdateShopifyAuthorizationMethod if r.Configuration.Credentials != nil { - var sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword - if r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil { - apiPassword := r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword.APIPassword.ValueString() - authMethod := shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod(r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword.AuthMethod.ValueString()) - sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword = &shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword{ - APIPassword: apiPassword, - AuthMethod: authMethod, - } - } - if sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil { - credentials = &shared.SourceShopifyUpdateShopifyAuthorizationMethod{ - SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword: sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword, - } - } var sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 *shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 if r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 != nil { accessToken := new(string) @@ -115,7 +106,7 @@ func (r *SourceShopifyResourceModel) ToUpdateSDKType() *shared.SourceShopifyPutR } else { accessToken = nil } - authMethod1 := shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.AuthMethod.ValueString()) + authMethod := shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.AuthMethod.ValueString()) clientID := new(string) if !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientID.IsUnknown() && !r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientID.IsNull() 
{ *clientID = r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20.ClientID.ValueString() @@ -130,7 +121,7 @@ func (r *SourceShopifyResourceModel) ToUpdateSDKType() *shared.SourceShopifyPutR } sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 = &shared.SourceShopifyUpdateShopifyAuthorizationMethodOAuth20{ AccessToken: accessToken, - AuthMethod: authMethod1, + AuthMethod: authMethod, ClientID: clientID, ClientSecret: clientSecret, } @@ -140,9 +131,28 @@ func (r *SourceShopifyResourceModel) ToUpdateSDKType() *shared.SourceShopifyPutR SourceShopifyUpdateShopifyAuthorizationMethodOAuth20: sourceShopifyUpdateShopifyAuthorizationMethodOAuth20, } } + var sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword + if r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil { + apiPassword := r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword.APIPassword.ValueString() + authMethod1 := shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod(r.Configuration.Credentials.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword.AuthMethod.ValueString()) + sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword = &shared.SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword{ + APIPassword: apiPassword, + AuthMethod: authMethod1, + } + } + if sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword != nil { + credentials = &shared.SourceShopifyUpdateShopifyAuthorizationMethod{ + SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword: sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword, + } + } } shop := r.Configuration.Shop.ValueString() - startDate := customTypes.MustDateFromString(r.Configuration.StartDate.ValueString()) + startDate := new(customTypes.Date) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + startDate = customTypes.MustNewDateFromString(r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceShopifyUpdate{ Credentials: credentials, Shop: shop, diff --git a/internal/provider/source_stripe_data_source.go b/internal/provider/source_stripe_data_source.go index 6e38b0248..ddc9f603e 100755 --- a/internal/provider/source_stripe_data_source.go +++ b/internal/provider/source_stripe_data_source.go @@ -63,7 +63,7 @@ func (r *SourceStripeDataSource) Schema(ctx context.Context, req datasource.Sche }, "lookback_window_days": schema.Int64Attribute{ Computed: true, - Description: `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here`, + Description: `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. 
More info here`, }, "slice_range": schema.Int64Attribute{ Computed: true, diff --git a/internal/provider/source_stripe_resource.go b/internal/provider/source_stripe_resource.go index 36fe3f961..0c0bbd3e6 100755 --- a/internal/provider/source_stripe_resource.go +++ b/internal/provider/source_stripe_resource.go @@ -65,7 +65,7 @@ func (r *SourceStripeResource) Schema(ctx context.Context, req resource.SchemaRe }, "lookback_window_days": schema.Int64Attribute{ Optional: true, - Description: `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here`, + Description: `When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here`, }, "slice_range": schema.Int64Attribute{ Optional: true, @@ -81,7 +81,7 @@ func (r *SourceStripeResource) Schema(ctx context.Context, req resource.SchemaRe Description: `must be one of ["stripe"]`, }, "start_date": schema.StringAttribute{ - Required: true, + Optional: true, Validators: []validator.String{ validators.IsRFC3339(), }, diff --git a/internal/provider/source_stripe_resource_sdk.go b/internal/provider/source_stripe_resource_sdk.go index adc4d413f..022bf185e 100755 --- a/internal/provider/source_stripe_resource_sdk.go +++ b/internal/provider/source_stripe_resource_sdk.go @@ -24,7 +24,12 @@ func (r *SourceStripeResourceModel) ToCreateSDKType() *shared.SourceStripeCreate sliceRange = nil } sourceType := shared.SourceStripeStripe(r.Configuration.SourceType.ValueString()) - startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + startDate := new(time.Time) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceStripe{ AccountID: accountID, ClientSecret: clientSecret, @@ -70,7 +75,12 @@ func (r *SourceStripeResourceModel) ToUpdateSDKType() *shared.SourceStripePutReq } else { sliceRange = nil } - startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + startDate := new(time.Time) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } configuration := shared.SourceStripeUpdate{ AccountID: accountID, ClientSecret: clientSecret, diff --git a/internal/provider/source_zendesksunshine_data_source.go b/internal/provider/source_zendesksunshine_data_source.go index 41a7f1629..03bad6f4a 100755 --- a/internal/provider/source_zendesksunshine_data_source.go +++ b/internal/provider/source_zendesksunshine_data_source.go @@ -32,11 +32,11 @@ type SourceZendeskSunshineDataSource struct { // SourceZendeskSunshineDataSourceModel describes the data model. 
type SourceZendeskSunshineDataSourceModel struct { - Configuration SourceZendeskSunshine1 `tfsdk:"configuration"` - Name types.String `tfsdk:"name"` - SecretID types.String `tfsdk:"secret_id"` - SourceID types.String `tfsdk:"source_id"` - WorkspaceID types.String `tfsdk:"workspace_id"` + Configuration SourceZendeskSunshine `tfsdk:"configuration"` + Name types.String `tfsdk:"name"` + SecretID types.String `tfsdk:"secret_id"` + SourceID types.String `tfsdk:"source_id"` + WorkspaceID types.String `tfsdk:"workspace_id"` } // Metadata returns the data source type name. @@ -76,13 +76,6 @@ func (r *SourceZendeskSunshineDataSource) Schema(ctx context.Context, req dataso Computed: true, Description: `The user email for your Zendesk account`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, "source_zendesk_sunshine_authorization_method_o_auth2_0": schema.SingleNestedAttribute{ @@ -109,13 +102,6 @@ func (r *SourceZendeskSunshineDataSource) Schema(ctx context.Context, req dataso Computed: true, Description: `The Client Secret of your OAuth application.`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, "source_zendesk_sunshine_update_authorization_method_api_token": schema.SingleNestedAttribute{ @@ -138,13 +124,6 @@ func (r *SourceZendeskSunshineDataSource) Schema(ctx context.Context, req dataso Computed: true, Description: `The user email for your Zendesk account`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, "source_zendesk_sunshine_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{ @@ -171,13 +150,6 @@ func (r *SourceZendeskSunshineDataSource) Schema(ctx context.Context, req dataso Computed: true, Description: `The Client Secret of your OAuth application.`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, }, @@ -195,7 +167,10 @@ func (r *SourceZendeskSunshineDataSource) Schema(ctx context.Context, req dataso Description: `must be one of ["zendesk-sunshine"]`, }, "start_date": schema.StringAttribute{ - Computed: true, + Computed: true, + Validators: []validator.String{ + validators.IsRFC3339(), + }, Description: `The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.`, }, "subdomain": schema.StringAttribute{ diff --git a/internal/provider/source_zendesksunshine_resource.go b/internal/provider/source_zendesksunshine_resource.go index b7b15463b..f31c86da9 100755 --- a/internal/provider/source_zendesksunshine_resource.go +++ b/internal/provider/source_zendesksunshine_resource.go @@ -78,13 +78,6 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource Required: true, Description: `The user email for your Zendesk account`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, "source_zendesk_sunshine_authorization_method_o_auth2_0": schema.SingleNestedAttribute{ @@ -111,13 +104,6 @@ func (r *SourceZendeskSunshineResource) Schema(ctx 
context.Context, req resource Required: true, Description: `The Client Secret of your OAuth application.`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, "source_zendesk_sunshine_update_authorization_method_api_token": schema.SingleNestedAttribute{ @@ -140,13 +126,6 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource Required: true, Description: `The user email for your Zendesk account`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, "source_zendesk_sunshine_update_authorization_method_o_auth2_0": schema.SingleNestedAttribute{ @@ -173,13 +152,6 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource Required: true, Description: `The Client Secret of your OAuth application.`, }, - "additional_properties": schema.StringAttribute{ - Optional: true, - Validators: []validator.String{ - validators.IsValidJSON(), - }, - Description: `Parsed as JSON.`, - }, }, }, }, @@ -197,7 +169,10 @@ func (r *SourceZendeskSunshineResource) Schema(ctx context.Context, req resource Description: `must be one of ["zendesk-sunshine"]`, }, "start_date": schema.StringAttribute{ - Required: true, + Required: true, + Validators: []validator.String{ + validators.IsRFC3339(), + }, Description: `The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z.`, }, "subdomain": schema.StringAttribute{ diff --git a/internal/provider/source_zendesksunshine_resource_sdk.go b/internal/provider/source_zendesksunshine_resource_sdk.go index 56dc1f10b..10857e20c 100755 --- a/internal/provider/source_zendesksunshine_resource_sdk.go +++ b/internal/provider/source_zendesksunshine_resource_sdk.go @@ -4,8 +4,8 @@ package provider import ( "airbyte/internal/sdk/pkg/models/shared" - "encoding/json" "github.com/hashicorp/terraform-plugin-framework/types" + "time" ) func (r *SourceZendeskSunshineResourceModel) ToCreateSDKType() *shared.SourceZendeskSunshineCreateRequest { @@ -17,16 +17,11 @@ func (r *SourceZendeskSunshineResourceModel) ToCreateSDKType() *shared.SourceZen authMethod := shared.SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.AuthMethod.ValueString()) clientID := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.ClientID.ValueString() clientSecret := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.ClientSecret.ValueString() - var additionalProperties interface{} - if !r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.AdditionalProperties.IsNull() { - _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodOAuth20.AdditionalProperties.ValueString()), &additionalProperties) - } sourceZendeskSunshineAuthorizationMethodOAuth20 = &shared.SourceZendeskSunshineAuthorizationMethodOAuth20{ - AccessToken: accessToken, - AuthMethod: authMethod, - ClientID: clientID, - ClientSecret: clientSecret, - AdditionalProperties: additionalProperties, + AccessToken: accessToken, + AuthMethod: authMethod, + ClientID: clientID, + ClientSecret: clientSecret, } } if 
sourceZendeskSunshineAuthorizationMethodOAuth20 != nil { @@ -39,15 +34,10 @@ func (r *SourceZendeskSunshineResourceModel) ToCreateSDKType() *shared.SourceZen apiToken := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.APIToken.ValueString() authMethod1 := shared.SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod(r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.AuthMethod.ValueString()) email := r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.Email.ValueString() - var additionalProperties1 interface{} - if !r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.AdditionalProperties.IsNull() { - _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSunshineAuthorizationMethodAPIToken.AdditionalProperties.ValueString()), &additionalProperties1) - } sourceZendeskSunshineAuthorizationMethodAPIToken = &shared.SourceZendeskSunshineAuthorizationMethodAPIToken{ - APIToken: apiToken, - AuthMethod: authMethod1, - Email: email, - AdditionalProperties: additionalProperties1, + APIToken: apiToken, + AuthMethod: authMethod1, + Email: email, } } if sourceZendeskSunshineAuthorizationMethodAPIToken != nil { @@ -57,7 +47,7 @@ func (r *SourceZendeskSunshineResourceModel) ToCreateSDKType() *shared.SourceZen } } sourceType := shared.SourceZendeskSunshineZendeskSunshine(r.Configuration.SourceType.ValueString()) - startDate := r.Configuration.StartDate.ValueString() + startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) subdomain := r.Configuration.Subdomain.ValueString() configuration := shared.SourceZendeskSunshine{ Credentials: credentials, @@ -96,16 +86,11 @@ func (r *SourceZendeskSunshineResourceModel) ToUpdateSDKType() *shared.SourceZen authMethod := shared.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod(r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.AuthMethod.ValueString()) clientID := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.ClientID.ValueString() clientSecret := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.ClientSecret.ValueString() - var additionalProperties interface{} - if !r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.AdditionalProperties.IsNull() { - _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20.AdditionalProperties.ValueString()), &additionalProperties) - } sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 = &shared.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20{ - AccessToken: accessToken, - AuthMethod: authMethod, - ClientID: clientID, - ClientSecret: clientSecret, - AdditionalProperties: additionalProperties, + AccessToken: accessToken, + AuthMethod: authMethod, + ClientID: clientID, + ClientSecret: clientSecret, } } if sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 != nil { @@ -118,15 +103,10 @@ func (r *SourceZendeskSunshineResourceModel) ToUpdateSDKType() *shared.SourceZen apiToken := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.APIToken.ValueString() authMethod1 := 
shared.SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod(r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.AuthMethod.ValueString()) email := r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.Email.ValueString() - var additionalProperties1 interface{} - if !r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.AdditionalProperties.IsUnknown() && !r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.AdditionalProperties.IsNull() { - _ = json.Unmarshal([]byte(r.Configuration.Credentials.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken.AdditionalProperties.ValueString()), &additionalProperties1) - } sourceZendeskSunshineUpdateAuthorizationMethodAPIToken = &shared.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken{ - APIToken: apiToken, - AuthMethod: authMethod1, - Email: email, - AdditionalProperties: additionalProperties1, + APIToken: apiToken, + AuthMethod: authMethod1, + Email: email, } } if sourceZendeskSunshineUpdateAuthorizationMethodAPIToken != nil { @@ -135,7 +115,7 @@ func (r *SourceZendeskSunshineResourceModel) ToUpdateSDKType() *shared.SourceZen } } } - startDate := r.Configuration.StartDate.ValueString() + startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) subdomain := r.Configuration.Subdomain.ValueString() configuration := shared.SourceZendeskSunshineUpdate{ Credentials: credentials, diff --git a/internal/provider/source_zendesksupport_resource.go b/internal/provider/source_zendesksupport_resource.go index 0c6091e42..6d0d5a8b6 100755 --- a/internal/provider/source_zendesksupport_resource.go +++ b/internal/provider/source_zendesksupport_resource.go @@ -206,7 +206,7 @@ func (r *SourceZendeskSupportResource) Schema(ctx context.Context, req resource. 
Description: `must be one of ["zendesk-support"]`, }, "start_date": schema.StringAttribute{ - Required: true, + Optional: true, Validators: []validator.String{ validators.IsRFC3339(), }, diff --git a/internal/provider/source_zendesksupport_resource_sdk.go b/internal/provider/source_zendesksupport_resource_sdk.go index d39aa3e57..2d8b9695b 100755 --- a/internal/provider/source_zendesksupport_resource_sdk.go +++ b/internal/provider/source_zendesksupport_resource_sdk.go @@ -84,7 +84,12 @@ func (r *SourceZendeskSupportResourceModel) ToCreateSDKType() *shared.SourceZend ignorePagination = nil } sourceType := shared.SourceZendeskSupportZendeskSupport(r.Configuration.SourceType.ValueString()) - startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + startDate := new(time.Time) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } subdomain := r.Configuration.Subdomain.ValueString() configuration := shared.SourceZendeskSupport{ Credentials: credentials, @@ -189,7 +194,12 @@ func (r *SourceZendeskSupportResourceModel) ToUpdateSDKType() *shared.SourceZend } else { ignorePagination = nil } - startDate, _ := time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + startDate := new(time.Time) + if !r.Configuration.StartDate.IsUnknown() && !r.Configuration.StartDate.IsNull() { + *startDate, _ = time.Parse(time.RFC3339Nano, r.Configuration.StartDate.ValueString()) + } else { + startDate = nil + } subdomain := r.Configuration.Subdomain.ValueString() configuration := shared.SourceZendeskSupportUpdate{ Credentials: credentials, diff --git a/internal/provider/type_destination_bigquery.go b/internal/provider/type_destination_bigquery.go index 83b7b9cd0..09022d87a 100755 --- a/internal/provider/type_destination_bigquery.go +++ b/internal/provider/type_destination_bigquery.go @@ -14,5 +14,4 @@ type DestinationBigquery struct { ProjectID types.String `tfsdk:"project_id"` RawDataDataset types.String `tfsdk:"raw_data_dataset"` TransformationPriority types.String `tfsdk:"transformation_priority"` - Use1s1tFormat types.Bool `tfsdk:"use_1s1t_format"` } diff --git a/internal/provider/type_destination_milvus.go b/internal/provider/type_destination_milvus.go new file mode 100755 index 000000000..3d3ea9fdf --- /dev/null +++ b/internal/provider/type_destination_milvus.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvus struct { + DestinationType types.String `tfsdk:"destination_type"` + Embedding DestinationMilvusEmbedding `tfsdk:"embedding"` + Indexing DestinationMilvusIndexing `tfsdk:"indexing"` + Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"` +} diff --git a/internal/provider/type_destination_milvus_embedding.go b/internal/provider/type_destination_milvus_embedding.go new file mode 100755 index 000000000..d282f648f --- /dev/null +++ b/internal/provider/type_destination_milvus_embedding.go @@ -0,0 +1,14 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +type DestinationMilvusEmbedding struct { + DestinationMilvusEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_milvus_embedding_cohere"` + DestinationMilvusEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_milvus_embedding_fake"` + DestinationMilvusEmbeddingFromField *DestinationMilvusEmbeddingFromField `tfsdk:"destination_milvus_embedding_from_field"` + DestinationMilvusEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_milvus_embedding_open_ai"` + DestinationMilvusUpdateEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_milvus_update_embedding_cohere"` + DestinationMilvusUpdateEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_milvus_update_embedding_fake"` + DestinationMilvusUpdateEmbeddingFromField *DestinationMilvusEmbeddingFromField `tfsdk:"destination_milvus_update_embedding_from_field"` + DestinationMilvusUpdateEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_milvus_update_embedding_open_ai"` +} diff --git a/internal/provider/type_destination_milvus_embedding_cohere.go b/internal/provider/type_destination_milvus_embedding_cohere.go new file mode 100755 index 000000000..5c0b2141d --- /dev/null +++ b/internal/provider/type_destination_milvus_embedding_cohere.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvusEmbeddingCohere struct { + CohereKey types.String `tfsdk:"cohere_key"` + Mode types.String `tfsdk:"mode"` +} diff --git a/internal/provider/type_destination_milvus_embedding_from_field.go b/internal/provider/type_destination_milvus_embedding_from_field.go new file mode 100755 index 000000000..b06084e93 --- /dev/null +++ b/internal/provider/type_destination_milvus_embedding_from_field.go @@ -0,0 +1,11 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvusEmbeddingFromField struct { + Dimensions types.Int64 `tfsdk:"dimensions"` + FieldName types.String `tfsdk:"field_name"` + Mode types.String `tfsdk:"mode"` +} diff --git a/internal/provider/type_destination_milvus_indexing.go b/internal/provider/type_destination_milvus_indexing.go new file mode 100755 index 000000000..6aca07861 --- /dev/null +++ b/internal/provider/type_destination_milvus_indexing.go @@ -0,0 +1,14 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvusIndexing struct { + Auth DestinationMilvusIndexingAuthentication `tfsdk:"auth"` + Collection types.String `tfsdk:"collection"` + Db types.String `tfsdk:"db"` + Host types.String `tfsdk:"host"` + TextField types.String `tfsdk:"text_field"` + VectorField types.String `tfsdk:"vector_field"` +} diff --git a/internal/provider/type_destination_milvus_indexing_authentication.go b/internal/provider/type_destination_milvus_indexing_authentication.go new file mode 100755 index 000000000..260541485 --- /dev/null +++ b/internal/provider/type_destination_milvus_indexing_authentication.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +type DestinationMilvusIndexingAuthentication struct { + DestinationMilvusIndexingAuthenticationAPIToken *DestinationMilvusIndexingAuthenticationAPIToken `tfsdk:"destination_milvus_indexing_authentication_api_token"` + DestinationMilvusIndexingAuthenticationNoAuth *DestinationMilvusIndexingAuthenticationNoAuth `tfsdk:"destination_milvus_indexing_authentication_no_auth"` + DestinationMilvusIndexingAuthenticationUsernamePassword *DestinationMilvusIndexingAuthenticationUsernamePassword `tfsdk:"destination_milvus_indexing_authentication_username_password"` + DestinationMilvusUpdateIndexingAuthenticationAPIToken *DestinationMilvusIndexingAuthenticationAPIToken `tfsdk:"destination_milvus_update_indexing_authentication_api_token"` + DestinationMilvusUpdateIndexingAuthenticationNoAuth *DestinationMilvusIndexingAuthenticationNoAuth `tfsdk:"destination_milvus_update_indexing_authentication_no_auth"` + DestinationMilvusUpdateIndexingAuthenticationUsernamePassword *DestinationMilvusIndexingAuthenticationUsernamePassword `tfsdk:"destination_milvus_update_indexing_authentication_username_password"` +} diff --git a/internal/provider/type_destination_milvus_indexing_authentication_api_token.go b/internal/provider/type_destination_milvus_indexing_authentication_api_token.go new file mode 100755 index 000000000..a49094f16 --- /dev/null +++ b/internal/provider/type_destination_milvus_indexing_authentication_api_token.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvusIndexingAuthenticationAPIToken struct { + Mode types.String `tfsdk:"mode"` + Token types.String `tfsdk:"token"` +} diff --git a/internal/provider/type_source_public_apis.go b/internal/provider/type_destination_milvus_indexing_authentication_no_auth.go similarity index 62% rename from internal/provider/type_source_public_apis.go rename to internal/provider/type_destination_milvus_indexing_authentication_no_auth.go index b277bca82..507bdc6cf 100755 --- a/internal/provider/type_source_public_apis.go +++ b/internal/provider/type_destination_milvus_indexing_authentication_no_auth.go @@ -4,6 +4,6 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourcePublicApis struct { - SourceType types.String `tfsdk:"source_type"` +type DestinationMilvusIndexingAuthenticationNoAuth struct { + Mode types.String `tfsdk:"mode"` } diff --git a/internal/provider/type_destination_milvus_indexing_authentication_username_password.go b/internal/provider/type_destination_milvus_indexing_authentication_username_password.go new file mode 100755 index 000000000..ffea134d1 --- /dev/null +++ b/internal/provider/type_destination_milvus_indexing_authentication_username_password.go @@ -0,0 +1,11 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvusIndexingAuthenticationUsernamePassword struct { + Mode types.String `tfsdk:"mode"` + Password types.String `tfsdk:"password"` + Username types.String `tfsdk:"username"` +} diff --git a/internal/provider/type_destination_milvus_processing_config_model.go b/internal/provider/type_destination_milvus_processing_config_model.go new file mode 100755 index 000000000..4c58a7c62 --- /dev/null +++ b/internal/provider/type_destination_milvus_processing_config_model.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationMilvusProcessingConfigModel struct { + ChunkOverlap types.Int64 `tfsdk:"chunk_overlap"` + ChunkSize types.Int64 `tfsdk:"chunk_size"` + MetadataFields []types.String `tfsdk:"metadata_fields"` + TextFields []types.String `tfsdk:"text_fields"` +} diff --git a/internal/provider/type_destination_pinecone.go b/internal/provider/type_destination_pinecone.go new file mode 100755 index 000000000..64eb170f3 --- /dev/null +++ b/internal/provider/type_destination_pinecone.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationPinecone struct { + DestinationType types.String `tfsdk:"destination_type"` + Embedding DestinationPineconeEmbedding `tfsdk:"embedding"` + Indexing DestinationPineconeIndexing `tfsdk:"indexing"` + Processing DestinationMilvusProcessingConfigModel `tfsdk:"processing"` +} diff --git a/internal/provider/type_destination_pinecone_embedding.go b/internal/provider/type_destination_pinecone_embedding.go new file mode 100755 index 000000000..abec7c195 --- /dev/null +++ b/internal/provider/type_destination_pinecone_embedding.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +type DestinationPineconeEmbedding struct { + DestinationPineconeEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_pinecone_embedding_cohere"` + DestinationPineconeEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_pinecone_embedding_fake"` + DestinationPineconeEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_pinecone_embedding_open_ai"` + DestinationPineconeUpdateEmbeddingCohere *DestinationMilvusEmbeddingCohere `tfsdk:"destination_pinecone_update_embedding_cohere"` + DestinationPineconeUpdateEmbeddingFake *DestinationLangchainEmbeddingFake `tfsdk:"destination_pinecone_update_embedding_fake"` + DestinationPineconeUpdateEmbeddingOpenAI *DestinationLangchainEmbeddingOpenAI `tfsdk:"destination_pinecone_update_embedding_open_ai"` +} diff --git a/internal/provider/type_destination_pinecone_indexing.go b/internal/provider/type_destination_pinecone_indexing.go new file mode 100755 index 000000000..d13f6ba4a --- /dev/null +++ b/internal/provider/type_destination_pinecone_indexing.go @@ -0,0 +1,11 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type DestinationPineconeIndexing struct { + Index types.String `tfsdk:"index"` + PineconeEnvironment types.String `tfsdk:"pinecone_environment"` + PineconeKey types.String `tfsdk:"pinecone_key"` +} diff --git a/internal/provider/type_destination_snowflake.go b/internal/provider/type_destination_snowflake.go index c221efdd0..1700af440 100755 --- a/internal/provider/type_destination_snowflake.go +++ b/internal/provider/type_destination_snowflake.go @@ -13,7 +13,6 @@ type DestinationSnowflake struct { RawDataSchema types.String `tfsdk:"raw_data_schema"` Role types.String `tfsdk:"role"` Schema types.String `tfsdk:"schema"` - Use1s1tFormat types.Bool `tfsdk:"use_1s1t_format"` Username types.String `tfsdk:"username"` Warehouse types.String `tfsdk:"warehouse"` } diff --git a/internal/provider/type_destination_typesense.go b/internal/provider/type_destination_typesense.go index 37df54aa0..c895f570a 100755 --- a/internal/provider/type_destination_typesense.go +++ b/internal/provider/type_destination_typesense.go @@ -6,7 +6,7 @@ import "github.com/hashicorp/terraform-plugin-framework/types" type DestinationTypesense struct { APIKey types.String `tfsdk:"api_key"` - BatchSize types.String `tfsdk:"batch_size"` + BatchSize types.Int64 `tfsdk:"batch_size"` DestinationType types.String `tfsdk:"destination_type"` Host types.String `tfsdk:"host"` Port types.String `tfsdk:"port"` diff --git a/internal/provider/type_source_amazon_ads.go b/internal/provider/type_source_amazon_ads.go index 3fa93ba52..4c7b03712 100755 --- a/internal/provider/type_source_amazon_ads.go +++ b/internal/provider/type_source_amazon_ads.go @@ -9,6 +9,7 @@ type SourceAmazonAds struct { ClientID types.String `tfsdk:"client_id"` ClientSecret types.String `tfsdk:"client_secret"` LookBackWindow types.Int64 `tfsdk:"look_back_window"` + MarketplaceIds []types.String `tfsdk:"marketplace_ids"` Profiles []types.Int64 `tfsdk:"profiles"` RefreshToken types.String `tfsdk:"refresh_token"` Region types.String `tfsdk:"region"` diff --git a/internal/provider/type_source_apify_dataset.go b/internal/provider/type_source_apify_dataset.go index 2cf2d7293..4f958a86e 100755 --- a/internal/provider/type_source_apify_dataset.go +++ b/internal/provider/type_source_apify_dataset.go @@ -8,4 +8,5 @@ type SourceApifyDataset struct { Clean types.Bool `tfsdk:"clean"` DatasetID types.String `tfsdk:"dataset_id"` SourceType types.String `tfsdk:"source_type"` + Token types.String `tfsdk:"token"` } diff --git a/internal/provider/type_source_auth0.go b/internal/provider/type_source_auth0.go index f308d5c63..70589a65e 100755 --- a/internal/provider/type_source_auth0.go +++ b/internal/provider/type_source_auth0.go @@ -8,4 +8,5 @@ type SourceAuth0 struct { BaseURL types.String `tfsdk:"base_url"` Credentials SourceAuth0AuthenticationMethod `tfsdk:"credentials"` SourceType types.String `tfsdk:"source_type"` + StartDate types.String `tfsdk:"start_date"` } diff --git a/internal/provider/type_source_datadog.go b/internal/provider/type_source_datadog.go deleted file mode 100755 index 67a360fc7..000000000 --- a/internal/provider/type_source_datadog.go +++ /dev/null @@ -1,17 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceDatadog struct { - APIKey types.String `tfsdk:"api_key"` - ApplicationKey types.String `tfsdk:"application_key"` - EndDate types.String `tfsdk:"end_date"` - MaxRecordsPerRequest types.Int64 `tfsdk:"max_records_per_request"` - Queries []SourceDatadogQueries `tfsdk:"queries"` - Query types.String `tfsdk:"query"` - Site types.String `tfsdk:"site"` - SourceType types.String `tfsdk:"source_type"` - StartDate types.String `tfsdk:"start_date"` -} diff --git a/internal/provider/type_source_datadog_queries.go b/internal/provider/type_source_datadog_queries.go deleted file mode 100755 index f75a7ee92..000000000 --- a/internal/provider/type_source_datadog_queries.go +++ /dev/null @@ -1,11 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceDatadogQueries struct { - DataSource types.String `tfsdk:"data_source"` - Name types.String `tfsdk:"name"` - Query types.String `tfsdk:"query"` -} diff --git a/internal/provider/type_source_google_search_console.go b/internal/provider/type_source_google_search_console.go index f8e3e23e9..802b69795 100755 --- a/internal/provider/type_source_google_search_console.go +++ b/internal/provider/type_source_google_search_console.go @@ -5,11 +5,12 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourceGoogleSearchConsole struct { - Authorization SourceGoogleSearchConsoleAuthenticationType `tfsdk:"authorization"` - CustomReports types.String `tfsdk:"custom_reports"` - DataState types.String `tfsdk:"data_state"` - EndDate types.String `tfsdk:"end_date"` - SiteUrls []types.String `tfsdk:"site_urls"` - SourceType types.String `tfsdk:"source_type"` - StartDate types.String `tfsdk:"start_date"` + Authorization SourceGoogleSearchConsoleAuthenticationType `tfsdk:"authorization"` + CustomReports types.String `tfsdk:"custom_reports"` + CustomReportsArray []SourceGoogleSearchConsoleCustomReportConfig `tfsdk:"custom_reports_array"` + DataState types.String `tfsdk:"data_state"` + EndDate types.String `tfsdk:"end_date"` + SiteUrls []types.String `tfsdk:"site_urls"` + SourceType types.String `tfsdk:"source_type"` + StartDate types.String `tfsdk:"start_date"` } diff --git a/internal/provider/type_source_google_search_console_custom_report_config.go b/internal/provider/type_source_google_search_console_custom_report_config.go new file mode 100755 index 000000000..f56387827 --- /dev/null +++ b/internal/provider/type_source_google_search_console_custom_report_config.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceGoogleSearchConsoleCustomReportConfig struct { + Dimensions []types.String `tfsdk:"dimensions"` + Name types.String `tfsdk:"name"` +} diff --git a/internal/provider/type_source_google_sheets.go b/internal/provider/type_source_google_sheets.go index d3c57c986..72c0282b1 100755 --- a/internal/provider/type_source_google_sheets.go +++ b/internal/provider/type_source_google_sheets.go @@ -7,7 +7,6 @@ import "github.com/hashicorp/terraform-plugin-framework/types" type SourceGoogleSheets struct { Credentials SourceGoogleSheetsAuthentication `tfsdk:"credentials"` NamesConversion types.Bool `tfsdk:"names_conversion"` - RowBatchSize types.Int64 `tfsdk:"row_batch_size"` SourceType types.String `tfsdk:"source_type"` SpreadsheetID types.String `tfsdk:"spreadsheet_id"` } diff --git a/internal/provider/type_source_intercom.go b/internal/provider/type_source_intercom.go index 020851e18..e1166b90f 100755 --- a/internal/provider/type_source_intercom.go +++ b/internal/provider/type_source_intercom.go @@ -5,7 +5,9 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourceIntercom struct { - AccessToken types.String `tfsdk:"access_token"` - SourceType types.String `tfsdk:"source_type"` - StartDate types.String `tfsdk:"start_date"` + AccessToken types.String `tfsdk:"access_token"` + ClientID types.String `tfsdk:"client_id"` + ClientSecret types.String `tfsdk:"client_secret"` + SourceType types.String `tfsdk:"source_type"` + StartDate types.String `tfsdk:"start_date"` } diff --git a/internal/provider/type_source_mssql.go b/internal/provider/type_source_mssql.go index 13e5e9cc1..164d63031 100755 --- a/internal/provider/type_source_mssql.go +++ b/internal/provider/type_source_mssql.go @@ -5,15 +5,15 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourceMssql struct { - Database types.String `tfsdk:"database"` - Host types.String `tfsdk:"host"` - JdbcURLParams types.String `tfsdk:"jdbc_url_params"` - Password types.String `tfsdk:"password"` - Port types.Int64 `tfsdk:"port"` - ReplicationMethod *SourceMssqlReplicationMethod `tfsdk:"replication_method"` - Schemas []types.String `tfsdk:"schemas"` - SourceType types.String `tfsdk:"source_type"` - SslMethod *SourceMssqlSSLMethod `tfsdk:"ssl_method"` - TunnelMethod *SourceMssqlSSHTunnelMethod `tfsdk:"tunnel_method"` - Username types.String `tfsdk:"username"` + Database types.String `tfsdk:"database"` + Host types.String `tfsdk:"host"` + JdbcURLParams types.String `tfsdk:"jdbc_url_params"` + Password types.String `tfsdk:"password"` + Port types.Int64 `tfsdk:"port"` + ReplicationMethod *SourceMssqlUpdateMethod `tfsdk:"replication_method"` + Schemas []types.String `tfsdk:"schemas"` + SourceType types.String `tfsdk:"source_type"` + SslMethod *SourceMssqlSSLMethod `tfsdk:"ssl_method"` + TunnelMethod *SourceMssqlSSHTunnelMethod `tfsdk:"tunnel_method"` + Username types.String `tfsdk:"username"` } diff --git a/internal/provider/type_source_mssql_replication_method.go b/internal/provider/type_source_mssql_replication_method.go deleted file mode 100755 index d07db8210..000000000 --- a/internal/provider/type_source_mssql_replication_method.go +++ /dev/null @@ -1,10 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -type SourceMssqlReplicationMethod struct { - SourceMssqlReplicationMethodLogicalReplicationCDC *SourceMssqlReplicationMethodLogicalReplicationCDC `tfsdk:"source_mssql_replication_method_logical_replication_cdc"` - SourceMssqlReplicationMethodStandard *SourceMssqlReplicationMethodStandard `tfsdk:"source_mssql_replication_method_standard"` - SourceMssqlUpdateReplicationMethodLogicalReplicationCDC *SourceMssqlReplicationMethodLogicalReplicationCDC `tfsdk:"source_mssql_update_replication_method_logical_replication_cdc"` - SourceMssqlUpdateReplicationMethodStandard *SourceMssqlReplicationMethodStandard `tfsdk:"source_mssql_update_replication_method_standard"` -} diff --git a/internal/provider/type_source_mssql_update_method.go b/internal/provider/type_source_mssql_update_method.go new file mode 100755 index 000000000..89e5f0546 --- /dev/null +++ b/internal/provider/type_source_mssql_update_method.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +type SourceMssqlUpdateMethod struct { + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC `tfsdk:"source_mssql_update_method_read_changes_using_change_data_capture_cdc"` + SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mssql_update_method_scan_changes_with_user_defined_cursor"` + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC `tfsdk:"source_mssql_update_update_method_read_changes_using_change_data_capture_cdc"` + SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mssql_update_update_method_scan_changes_with_user_defined_cursor"` +} diff --git a/internal/provider/type_source_mssql_replication_method_logical_replication_cdc.go b/internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go similarity index 84% rename from internal/provider/type_source_mssql_replication_method_logical_replication_cdc.go rename to internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go index d33cbc9d6..605edb04d 100755 --- a/internal/provider/type_source_mssql_replication_method_logical_replication_cdc.go +++ b/internal/provider/type_source_mssql_update_method_read_changes_using_change_data_capture_cdc.go @@ -4,7 +4,7 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourceMssqlReplicationMethodLogicalReplicationCDC struct { +type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC struct { DataToSync types.String `tfsdk:"data_to_sync"` InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"` Method types.String `tfsdk:"method"` diff --git a/internal/provider/type_source_mysql_update_method_scan_changes_with_user_defined_cursor.go b/internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go similarity index 75% rename from internal/provider/type_source_mysql_update_method_scan_changes_with_user_defined_cursor.go rename to internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go index 541f1c3c1..bfc9a2d02 100755 --- a/internal/provider/type_source_mysql_update_method_scan_changes_with_user_defined_cursor.go +++ 
b/internal/provider/type_source_mssql_update_method_scan_changes_with_user_defined_cursor.go @@ -4,6 +4,6 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor struct { +type SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor struct { Method types.String `tfsdk:"method"` } diff --git a/internal/provider/type_source_mysql_update_method.go b/internal/provider/type_source_mysql_update_method.go index 10e0ba305..0285025fd 100755 --- a/internal/provider/type_source_mysql_update_method.go +++ b/internal/provider/type_source_mysql_update_method.go @@ -4,7 +4,7 @@ package provider type SourceMysqlUpdateMethod struct { SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC `tfsdk:"source_mysql_update_method_read_changes_using_binary_log_cdc"` - SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mysql_update_method_scan_changes_with_user_defined_cursor"` + SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mysql_update_method_scan_changes_with_user_defined_cursor"` SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC *SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC `tfsdk:"source_mysql_update_update_method_read_changes_using_binary_log_cdc"` - SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mysql_update_update_method_scan_changes_with_user_defined_cursor"` + SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_mysql_update_update_method_scan_changes_with_user_defined_cursor"` } diff --git a/internal/provider/type_source_openweather.go b/internal/provider/type_source_openweather.go deleted file mode 100755 index 00ebbfe59..000000000 --- a/internal/provider/type_source_openweather.go +++ /dev/null @@ -1,14 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceOpenweather struct { - Appid types.String `tfsdk:"appid"` - Lang types.String `tfsdk:"lang"` - Lat types.String `tfsdk:"lat"` - Lon types.String `tfsdk:"lon"` - SourceType types.String `tfsdk:"source_type"` - Units types.String `tfsdk:"units"` -} diff --git a/internal/provider/type_source_postgres.go b/internal/provider/type_source_postgres.go index cbe7c4c76..3ca85ad2e 100755 --- a/internal/provider/type_source_postgres.go +++ b/internal/provider/type_source_postgres.go @@ -5,15 +5,15 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourcePostgres struct { - Database types.String `tfsdk:"database"` - Host types.String `tfsdk:"host"` - JdbcURLParams types.String `tfsdk:"jdbc_url_params"` - Password types.String `tfsdk:"password"` - Port types.Int64 `tfsdk:"port"` - ReplicationMethod *SourcePostgresReplicationMethod `tfsdk:"replication_method"` - Schemas []types.String `tfsdk:"schemas"` - SourceType types.String `tfsdk:"source_type"` - SslMode *SourcePostgresSSLModes `tfsdk:"ssl_mode"` - TunnelMethod *SourcePostgresSSHTunnelMethod `tfsdk:"tunnel_method"` - Username types.String `tfsdk:"username"` + Database types.String `tfsdk:"database"` + Host types.String `tfsdk:"host"` + JdbcURLParams types.String `tfsdk:"jdbc_url_params"` + Password types.String `tfsdk:"password"` + Port types.Int64 `tfsdk:"port"` + ReplicationMethod *SourcePostgresUpdateMethod `tfsdk:"replication_method"` + Schemas []types.String `tfsdk:"schemas"` + SourceType types.String `tfsdk:"source_type"` + SslMode *SourcePostgresSSLModes `tfsdk:"ssl_mode"` + TunnelMethod *SourcePostgresSSHTunnelMethod `tfsdk:"tunnel_method"` + Username types.String `tfsdk:"username"` } diff --git a/internal/provider/type_source_postgres1.go b/internal/provider/type_source_postgres1.go index 45733477b..c9deba721 100755 --- a/internal/provider/type_source_postgres1.go +++ b/internal/provider/type_source_postgres1.go @@ -5,15 +5,15 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourcePostgres1 struct { - Database types.String `tfsdk:"database"` - Host types.String `tfsdk:"host"` - JdbcURLParams types.String `tfsdk:"jdbc_url_params"` - Password types.String `tfsdk:"password"` - Port types.Int64 `tfsdk:"port"` - ReplicationMethod *SourcePostgresReplicationMethod1 `tfsdk:"replication_method"` - Schemas []types.String `tfsdk:"schemas"` - SourceType types.String `tfsdk:"source_type"` - SslMode *SourcePostgresSSLModes1 `tfsdk:"ssl_mode"` - TunnelMethod *SourcePostgresSSHTunnelMethod `tfsdk:"tunnel_method"` - Username types.String `tfsdk:"username"` + Database types.String `tfsdk:"database"` + Host types.String `tfsdk:"host"` + JdbcURLParams types.String `tfsdk:"jdbc_url_params"` + Password types.String `tfsdk:"password"` + Port types.Int64 `tfsdk:"port"` + ReplicationMethod *SourcePostgresUpdateMethod1 `tfsdk:"replication_method"` + Schemas []types.String `tfsdk:"schemas"` + SourceType types.String `tfsdk:"source_type"` + SslMode *SourcePostgresSSLModes1 `tfsdk:"ssl_mode"` + TunnelMethod *SourcePostgresSSHTunnelMethod `tfsdk:"tunnel_method"` + Username types.String `tfsdk:"username"` } diff --git a/internal/provider/type_source_postgres_replication_method.go b/internal/provider/type_source_postgres_replication_method.go deleted file mode 100755 index 08167e083..000000000 --- a/internal/provider/type_source_postgres_replication_method.go +++ /dev/null @@ -1,12 +0,0 @@ -// Code 
generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -type SourcePostgresReplicationMethod struct { - SourcePostgresReplicationMethodLogicalReplicationCDC *SourcePostgresReplicationMethodLogicalReplicationCDC `tfsdk:"source_postgres_replication_method_logical_replication_cdc"` - SourcePostgresReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_postgres_replication_method_standard"` - SourcePostgresReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_postgres_replication_method_standard_xmin"` - SourcePostgresUpdateReplicationMethodLogicalReplicationCDC *SourcePostgresUpdateReplicationMethodLogicalReplicationCDC `tfsdk:"source_postgres_update_replication_method_logical_replication_cdc"` - SourcePostgresUpdateReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_postgres_update_replication_method_standard"` - SourcePostgresUpdateReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_postgres_update_replication_method_standard_xmin"` -} diff --git a/internal/provider/type_source_postgres_replication_method1.go b/internal/provider/type_source_postgres_replication_method1.go deleted file mode 100755 index 24b8c7a95..000000000 --- a/internal/provider/type_source_postgres_replication_method1.go +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -type SourcePostgresReplicationMethod1 struct { - SourcePostgresReplicationMethodLogicalReplicationCDC *SourcePostgresReplicationMethodLogicalReplicationCDC1 `tfsdk:"source_postgres_replication_method_logical_replication_cdc"` - SourcePostgresReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_postgres_replication_method_standard"` - SourcePostgresReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_postgres_replication_method_standard_xmin"` - SourcePostgresUpdateReplicationMethodLogicalReplicationCDC *SourcePostgresUpdateReplicationMethodLogicalReplicationCDC1 `tfsdk:"source_postgres_update_replication_method_logical_replication_cdc"` - SourcePostgresUpdateReplicationMethodStandard *SourceAlloydbReplicationMethodStandard `tfsdk:"source_postgres_update_replication_method_standard"` - SourcePostgresUpdateReplicationMethodStandardXmin *SourceAlloydbReplicationMethodStandardXmin `tfsdk:"source_postgres_update_replication_method_standard_xmin"` -} diff --git a/internal/provider/type_source_postgres_update_method.go b/internal/provider/type_source_postgres_update_method.go new file mode 100755 index 000000000..717f7ed90 --- /dev/null +++ b/internal/provider/type_source_postgres_update_method.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +type SourcePostgresUpdateMethod struct { + SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_method_detect_changes_with_xmin_system_column"` + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC `tfsdk:"source_postgres_update_method_read_changes_using_write_ahead_log_cdc"` + SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_method_scan_changes_with_user_defined_cursor"` + SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_update_method_detect_changes_with_xmin_system_column"` + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC `tfsdk:"source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc"` + SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_update_method_scan_changes_with_user_defined_cursor"` +} diff --git a/internal/provider/type_source_postgres_update_method1.go b/internal/provider/type_source_postgres_update_method1.go new file mode 100755 index 000000000..1969f03f3 --- /dev/null +++ b/internal/provider/type_source_postgres_update_method1.go @@ -0,0 +1,12 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +type SourcePostgresUpdateMethod1 struct { + SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_method_detect_changes_with_xmin_system_column"` + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC1 `tfsdk:"source_postgres_update_method_read_changes_using_write_ahead_log_cdc"` + SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_method_scan_changes_with_user_defined_cursor"` + SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn `tfsdk:"source_postgres_update_update_method_detect_changes_with_xmin_system_column"` + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC1 `tfsdk:"source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc"` + SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor `tfsdk:"source_postgres_update_update_method_scan_changes_with_user_defined_cursor"` +} diff --git a/internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go b/internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go new file mode 100755 index 000000000..9a3b9914e --- /dev/null +++ b/internal/provider/type_source_postgres_update_method_detect_changes_with_xmin_system_column.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn struct { + Method types.String `tfsdk:"method"` +} diff --git a/internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc.go b/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go similarity index 91% rename from internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc.go rename to internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go index d17b94b9e..799c04180 100755 --- a/internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc.go +++ b/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc.go @@ -4,7 +4,7 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourcePostgresUpdateReplicationMethodLogicalReplicationCDC struct { +type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC struct { InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"` LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"` Method types.String `tfsdk:"method"` diff --git a/internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc1.go b/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go similarity index 91% rename from internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc1.go rename to internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go index 2e8593aa4..4cf9424f8 100755 --- a/internal/provider/type_source_postgres_update_replication_method_logical_replication_cdc1.go +++ b/internal/provider/type_source_postgres_update_method_read_changes_using_write_ahead_log_cdc1.go @@ -4,7 +4,7 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourcePostgresUpdateReplicationMethodLogicalReplicationCDC1 struct { +type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC1 struct { InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"` LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"` Method types.String `tfsdk:"method"` diff --git a/internal/provider/type_source_mssql_replication_method_standard.go b/internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go similarity index 72% rename from internal/provider/type_source_mssql_replication_method_standard.go rename to internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go index 1c6a88441..c0bf69744 100755 --- a/internal/provider/type_source_mssql_replication_method_standard.go +++ b/internal/provider/type_source_postgres_update_method_scan_changes_with_user_defined_cursor.go @@ -4,6 +4,6 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourceMssqlReplicationMethodStandard struct { +type SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor struct { Method types.String `tfsdk:"method"` } diff --git a/internal/provider/type_source_postgres_replication_method_logical_replication_cdc1.go b/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go similarity index 89% rename from internal/provider/type_source_postgres_replication_method_logical_replication_cdc1.go rename to 
internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go index 84c568c48..b77b1e797 100755 --- a/internal/provider/type_source_postgres_replication_method_logical_replication_cdc1.go +++ b/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc.go @@ -4,7 +4,7 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourcePostgresReplicationMethodLogicalReplicationCDC1 struct { +type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC struct { InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"` LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"` Method types.String `tfsdk:"method"` diff --git a/internal/provider/type_source_postgres_replication_method_logical_replication_cdc.go b/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go similarity index 88% rename from internal/provider/type_source_postgres_replication_method_logical_replication_cdc.go rename to internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go index 7d83ee6e7..af29faa57 100755 --- a/internal/provider/type_source_postgres_replication_method_logical_replication_cdc.go +++ b/internal/provider/type_source_postgres_update_update_method_read_changes_using_write_ahead_log_cdc1.go @@ -4,7 +4,7 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" -type SourcePostgresReplicationMethodLogicalReplicationCDC struct { +type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC1 struct { InitialWaitingSeconds types.Int64 `tfsdk:"initial_waiting_seconds"` LsnCommitBehaviour types.String `tfsdk:"lsn_commit_behaviour"` Method types.String `tfsdk:"method"` diff --git a/internal/provider/type_source_posthog.go b/internal/provider/type_source_posthog.go index 3b3eb5b96..826d569c4 100755 --- a/internal/provider/type_source_posthog.go +++ b/internal/provider/type_source_posthog.go @@ -5,8 +5,9 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourcePosthog struct { - APIKey types.String `tfsdk:"api_key"` - BaseURL types.String `tfsdk:"base_url"` - SourceType types.String `tfsdk:"source_type"` - StartDate types.String `tfsdk:"start_date"` + APIKey types.String `tfsdk:"api_key"` + BaseURL types.String `tfsdk:"base_url"` + EventsTimeStep types.Int64 `tfsdk:"events_time_step"` + SourceType types.String `tfsdk:"source_type"` + StartDate types.String `tfsdk:"start_date"` } diff --git a/internal/provider/type_source_s3.go b/internal/provider/type_source_s3.go index cc17531f6..f810d1468 100755 --- a/internal/provider/type_source_s3.go +++ b/internal/provider/type_source_s3.go @@ -5,10 +5,16 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourceS3 struct { - Dataset types.String `tfsdk:"dataset"` - Format *SourceS3FileFormat `tfsdk:"format"` - PathPattern types.String `tfsdk:"path_pattern"` - Provider SourceS3S3AmazonWebServices `tfsdk:"provider"` - Schema types.String `tfsdk:"schema"` - SourceType types.String `tfsdk:"source_type"` + AwsAccessKeyID types.String `tfsdk:"aws_access_key_id"` + AwsSecretAccessKey types.String `tfsdk:"aws_secret_access_key"` + Bucket types.String `tfsdk:"bucket"` + Dataset types.String `tfsdk:"dataset"` + Endpoint types.String `tfsdk:"endpoint"` + Format *SourceS3FileFormat `tfsdk:"format"` + PathPattern types.String `tfsdk:"path_pattern"` + Provider 
*SourceS3S3AmazonWebServices `tfsdk:"provider"` + Schema types.String `tfsdk:"schema"` + SourceType types.String `tfsdk:"source_type"` + StartDate types.String `tfsdk:"start_date"` + Streams []SourceS3FileBasedStreamConfig `tfsdk:"streams"` } diff --git a/internal/provider/type_source_s3_file_based_stream_config.go b/internal/provider/type_source_s3_file_based_stream_config.go new file mode 100755 index 000000000..4bb775cd2 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config.go @@ -0,0 +1,18 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfig struct { + DaysToSyncIfHistoryIsFull types.Int64 `tfsdk:"days_to_sync_if_history_is_full"` + FileType types.String `tfsdk:"file_type"` + Format *SourceS3FileBasedStreamConfigFormat `tfsdk:"format"` + Globs []types.String `tfsdk:"globs"` + InputSchema types.String `tfsdk:"input_schema"` + LegacyPrefix types.String `tfsdk:"legacy_prefix"` + Name types.String `tfsdk:"name"` + PrimaryKey types.String `tfsdk:"primary_key"` + Schemaless types.Bool `tfsdk:"schemaless"` + ValidationPolicy types.String `tfsdk:"validation_policy"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format.go b/internal/provider/type_source_s3_file_based_stream_config_format.go new file mode 100755 index 000000000..b92efa177 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format.go @@ -0,0 +1,14 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +type SourceS3FileBasedStreamConfigFormat struct { + SourceS3FileBasedStreamConfigFormatAvroFormat *SourceS3FileBasedStreamConfigFormatAvroFormat `tfsdk:"source_s3_file_based_stream_config_format_avro_format"` + SourceS3FileBasedStreamConfigFormatCSVFormat *SourceS3FileBasedStreamConfigFormatCSVFormat `tfsdk:"source_s3_file_based_stream_config_format_csv_format"` + SourceS3FileBasedStreamConfigFormatJsonlFormat *SourceS3FileBasedStreamConfigFormatJsonlFormat `tfsdk:"source_s3_file_based_stream_config_format_jsonl_format"` + SourceS3FileBasedStreamConfigFormatParquetFormat *SourceS3FileBasedStreamConfigFormatParquetFormat `tfsdk:"source_s3_file_based_stream_config_format_parquet_format"` + SourceS3UpdateFileBasedStreamConfigFormatAvroFormat *SourceS3FileBasedStreamConfigFormatAvroFormat `tfsdk:"source_s3_update_file_based_stream_config_format_avro_format"` + SourceS3UpdateFileBasedStreamConfigFormatCSVFormat *SourceS3UpdateFileBasedStreamConfigFormatCSVFormat `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format"` + SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat *SourceS3FileBasedStreamConfigFormatJsonlFormat `tfsdk:"source_s3_update_file_based_stream_config_format_jsonl_format"` + SourceS3UpdateFileBasedStreamConfigFormatParquetFormat *SourceS3FileBasedStreamConfigFormatParquetFormat `tfsdk:"source_s3_update_file_based_stream_config_format_parquet_format"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go new file mode 100755 index 000000000..b9170e6cb --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_avro_format.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatAvroFormat struct { + DoubleAsString types.Bool `tfsdk:"double_as_string"` + Filetype types.String `tfsdk:"filetype"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go new file mode 100755 index 000000000..3eb886dc5 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format.go @@ -0,0 +1,22 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatCSVFormat struct { + Delimiter types.String `tfsdk:"delimiter"` + DoubleQuote types.Bool `tfsdk:"double_quote"` + Encoding types.String `tfsdk:"encoding"` + EscapeChar types.String `tfsdk:"escape_char"` + FalseValues []types.String `tfsdk:"false_values"` + Filetype types.String `tfsdk:"filetype"` + HeaderDefinition *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `tfsdk:"header_definition"` + InferenceType types.String `tfsdk:"inference_type"` + NullValues []types.String `tfsdk:"null_values"` + QuoteChar types.String `tfsdk:"quote_char"` + SkipRowsAfterHeader types.Int64 `tfsdk:"skip_rows_after_header"` + SkipRowsBeforeHeader types.Int64 `tfsdk:"skip_rows_before_header"` + StringsCanBeNull types.Bool `tfsdk:"strings_can_be_null"` + TrueValues []types.String `tfsdk:"true_values"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go new file mode 100755 index 000000000..f9bed06d3 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct { + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated `tfsdk:"source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated"` + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV `tfsdk:"source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv"` + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided `tfsdk:"source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go new file mode 100755 index 000000000..79580fa10 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated struct { + HeaderDefinitionType types.String `tfsdk:"header_definition_type"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go new file mode 100755 index 000000000..e083f93bd --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_from_csv.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV struct { + HeaderDefinitionType types.String `tfsdk:"header_definition_type"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go new file mode 100755 index 000000000..ace5d88b2 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_csv_format_csv_header_definition_user_provided.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided struct { + ColumnNames []types.String `tfsdk:"column_names"` + HeaderDefinitionType types.String `tfsdk:"header_definition_type"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go new file mode 100755 index 000000000..41910f3b3 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_jsonl_format.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatJsonlFormat struct { + Filetype types.String `tfsdk:"filetype"` +} diff --git a/internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go b/internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go new file mode 100755 index 000000000..8553b9023 --- /dev/null +++ b/internal/provider/type_source_s3_file_based_stream_config_format_parquet_format.go @@ -0,0 +1,10 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3FileBasedStreamConfigFormatParquetFormat struct { + DecimalAsFloat types.Bool `tfsdk:"decimal_as_float"` + Filetype types.String `tfsdk:"filetype"` +} diff --git a/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go b/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go new file mode 100755 index 000000000..00d6f4b8f --- /dev/null +++ b/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format.go @@ -0,0 +1,22 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package provider + +import "github.com/hashicorp/terraform-plugin-framework/types" + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormat struct { + Delimiter types.String `tfsdk:"delimiter"` + DoubleQuote types.Bool `tfsdk:"double_quote"` + Encoding types.String `tfsdk:"encoding"` + EscapeChar types.String `tfsdk:"escape_char"` + FalseValues []types.String `tfsdk:"false_values"` + Filetype types.String `tfsdk:"filetype"` + HeaderDefinition *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `tfsdk:"header_definition"` + InferenceType types.String `tfsdk:"inference_type"` + NullValues []types.String `tfsdk:"null_values"` + QuoteChar types.String `tfsdk:"quote_char"` + SkipRowsAfterHeader types.Int64 `tfsdk:"skip_rows_after_header"` + SkipRowsBeforeHeader types.Int64 `tfsdk:"skip_rows_before_header"` + StringsCanBeNull types.Bool `tfsdk:"strings_can_be_null"` + TrueValues []types.String `tfsdk:"true_values"` +} diff --git a/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go b/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go new file mode 100755 index 000000000..bfe0c369c --- /dev/null +++ b/internal/provider/type_source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package provider + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct { + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_from_csv"` + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_autogenerated"` + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided `tfsdk:"source_s3_update_file_based_stream_config_format_csv_format_csv_header_definition_user_provided"` +} diff --git a/internal/provider/type_source_zendesk_sunshine1.go b/internal/provider/type_source_zendesk_sunshine1.go deleted file mode 100755 index 6b8f165ca..000000000 --- a/internal/provider/type_source_zendesk_sunshine1.go +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
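Not part of the patch: a minimal sketch of how the new S3 file-based stream configuration types added above might be assembled, again assuming the types.StringValue/types.Int64Value constructors are available; the stream name, glob, and option labels are illustrative assumptions rather than values from this diff.

package provider

import "github.com/hashicorp/terraform-plugin-framework/types"

// exampleS3CSVStream is a hypothetical helper (not in the patch) building one
// CSV stream entry for SourceS3.Streams using the new file-based config types.
func exampleS3CSVStream() SourceS3FileBasedStreamConfig {
	return SourceS3FileBasedStreamConfig{
		Name:                      types.StringValue("orders"),                       // assumed stream name
		FileType:                  types.StringValue("csv"),                          // assumed value
		Globs:                     []types.String{types.StringValue("orders/*.csv")}, // assumed glob
		DaysToSyncIfHistoryIsFull: types.Int64Value(3),                               // assumed value
		ValidationPolicy:          types.StringValue("Emit Record"),                  // assumed policy label
		Format: &SourceS3FileBasedStreamConfigFormat{
			SourceS3FileBasedStreamConfigFormatCSVFormat: &SourceS3FileBasedStreamConfigFormatCSVFormat{
				Filetype:  types.StringValue("csv"), // assumed discriminator value
				Delimiter: types.StringValue(","),
				HeaderDefinition: &SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{
					SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: &SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV{
						HeaderDefinitionType: types.StringValue("From CSV"), // assumed option label
					},
				},
			},
		},
	}
}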
- -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshine1 struct { - Credentials *SourceZendeskSunshineAuthorizationMethod1 `tfsdk:"credentials"` - SourceType types.String `tfsdk:"source_type"` - StartDate types.String `tfsdk:"start_date"` - Subdomain types.String `tfsdk:"subdomain"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method.go b/internal/provider/type_source_zendesk_sunshine_authorization_method.go index 3cb61c523..b63691156 100755 --- a/internal/provider/type_source_zendesk_sunshine_authorization_method.go +++ b/internal/provider/type_source_zendesk_sunshine_authorization_method.go @@ -3,8 +3,8 @@ package provider type SourceZendeskSunshineAuthorizationMethod struct { - SourceZendeskSunshineAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken `tfsdk:"source_zendesk_sunshine_authorization_method_api_token"` - SourceZendeskSunshineAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_sunshine_authorization_method_o_auth2_0"` - SourceZendeskSunshineUpdateAuthorizationMethodAPIToken *SourceZendeskSunshineUpdateAuthorizationMethodAPIToken `tfsdk:"source_zendesk_sunshine_update_authorization_method_api_token"` - SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 *SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_sunshine_update_authorization_method_o_auth2_0"` + SourceZendeskSunshineAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken `tfsdk:"source_zendesk_sunshine_authorization_method_api_token"` + SourceZendeskSunshineAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_sunshine_authorization_method_o_auth2_0"` + SourceZendeskSunshineUpdateAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken `tfsdk:"source_zendesk_sunshine_update_authorization_method_api_token"` + SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth20 `tfsdk:"source_zendesk_sunshine_update_authorization_method_o_auth2_0"` } diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method1.go b/internal/provider/type_source_zendesk_sunshine_authorization_method1.go deleted file mode 100755 index c546e0971..000000000 --- a/internal/provider/type_source_zendesk_sunshine_authorization_method1.go +++ /dev/null @@ -1,10 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -type SourceZendeskSunshineAuthorizationMethod1 struct { - SourceZendeskSunshineAuthorizationMethodAPIToken *SourceZendeskSunshineAuthorizationMethodAPIToken1 `tfsdk:"source_zendesk_sunshine_authorization_method_api_token"` - SourceZendeskSunshineAuthorizationMethodOAuth20 *SourceZendeskSunshineAuthorizationMethodOAuth201 `tfsdk:"source_zendesk_sunshine_authorization_method_o_auth2_0"` - SourceZendeskSunshineUpdateAuthorizationMethodAPIToken *SourceZendeskSunshineUpdateAuthorizationMethodAPIToken1 `tfsdk:"source_zendesk_sunshine_update_authorization_method_api_token"` - SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 *SourceZendeskSunshineUpdateAuthorizationMethodOAuth201 `tfsdk:"source_zendesk_sunshine_update_authorization_method_o_auth2_0"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go b/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go index 8fdca86c3..4fca9b95a 100755 --- a/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go +++ b/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token.go @@ -5,8 +5,7 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourceZendeskSunshineAuthorizationMethodAPIToken struct { - APIToken types.String `tfsdk:"api_token"` - AuthMethod types.String `tfsdk:"auth_method"` - Email types.String `tfsdk:"email"` - AdditionalProperties types.String `tfsdk:"additional_properties"` + APIToken types.String `tfsdk:"api_token"` + AuthMethod types.String `tfsdk:"auth_method"` + Email types.String `tfsdk:"email"` } diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token1.go b/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token1.go deleted file mode 100755 index 2c160c5ee..000000000 --- a/internal/provider/type_source_zendesk_sunshine_authorization_method_api_token1.go +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshineAuthorizationMethodAPIToken1 struct { - APIToken types.String `tfsdk:"api_token"` - AuthMethod types.String `tfsdk:"auth_method"` - Email types.String `tfsdk:"email"` - AdditionalProperties types.String `tfsdk:"additional_properties"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go b/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go index 38990251c..5747b9c21 100755 --- a/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go +++ b/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth20.go @@ -5,9 +5,8 @@ package provider import "github.com/hashicorp/terraform-plugin-framework/types" type SourceZendeskSunshineAuthorizationMethodOAuth20 struct { - AccessToken types.String `tfsdk:"access_token"` - AuthMethod types.String `tfsdk:"auth_method"` - ClientID types.String `tfsdk:"client_id"` - ClientSecret types.String `tfsdk:"client_secret"` - AdditionalProperties types.String `tfsdk:"additional_properties"` + AccessToken types.String `tfsdk:"access_token"` + AuthMethod types.String `tfsdk:"auth_method"` + ClientID types.String `tfsdk:"client_id"` + ClientSecret types.String `tfsdk:"client_secret"` } diff --git a/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth201.go b/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth201.go deleted file mode 100755 index 2a66d0f1e..000000000 --- a/internal/provider/type_source_zendesk_sunshine_authorization_method_o_auth201.go +++ /dev/null @@ -1,13 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshineAuthorizationMethodOAuth201 struct { - AccessToken types.String `tfsdk:"access_token"` - AuthMethod types.String `tfsdk:"auth_method"` - ClientID types.String `tfsdk:"client_id"` - ClientSecret types.String `tfsdk:"client_secret"` - AdditionalProperties types.String `tfsdk:"additional_properties"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token.go b/internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token.go deleted file mode 100755 index 32e7de167..000000000 --- a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token.go +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshineUpdateAuthorizationMethodAPIToken struct { - APIToken types.String `tfsdk:"api_token"` - AuthMethod types.String `tfsdk:"auth_method"` - Email types.String `tfsdk:"email"` - AdditionalProperties types.String `tfsdk:"additional_properties"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token1.go b/internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token1.go deleted file mode 100755 index a3933c840..000000000 --- a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_api_token1.go +++ /dev/null @@ -1,12 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshineUpdateAuthorizationMethodAPIToken1 struct { - APIToken types.String `tfsdk:"api_token"` - AuthMethod types.String `tfsdk:"auth_method"` - Email types.String `tfsdk:"email"` - AdditionalProperties types.String `tfsdk:"additional_properties"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth20.go b/internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth20.go deleted file mode 100755 index cd88297c8..000000000 --- a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth20.go +++ /dev/null @@ -1,13 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 struct { - AccessToken types.String `tfsdk:"access_token"` - AuthMethod types.String `tfsdk:"auth_method"` - ClientID types.String `tfsdk:"client_id"` - ClientSecret types.String `tfsdk:"client_secret"` - AdditionalProperties types.String `tfsdk:"additional_properties"` -} diff --git a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth201.go b/internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth201.go deleted file mode 100755 index 6c9479820..000000000 --- a/internal/provider/type_source_zendesk_sunshine_update_authorization_method_o_auth201.go +++ /dev/null @@ -1,13 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package provider - -import "github.com/hashicorp/terraform-plugin-framework/types" - -type SourceZendeskSunshineUpdateAuthorizationMethodOAuth201 struct { - AccessToken types.String `tfsdk:"access_token"` - AuthMethod types.String `tfsdk:"auth_method"` - ClientID types.String `tfsdk:"client_id"` - ClientSecret types.String `tfsdk:"client_secret"` - AdditionalProperties types.String `tfsdk:"additional_properties"` -} diff --git a/internal/sdk/destinations.go b/internal/sdk/destinations.go index d500b78f7..aaa586ce9 100755 --- a/internal/sdk/destinations.go +++ b/internal/sdk/destinations.go @@ -1366,6 +1366,73 @@ func (s *destinations) CreateDestinationLangchain(ctx context.Context, request s return res, nil } +// CreateDestinationMilvus - Create a destination +// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination. 
+func (s *destinations) CreateDestinationMilvus(ctx context.Context, request shared.DestinationMilvusCreateRequest) (*operations.CreateDestinationMilvusResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url := strings.TrimSuffix(baseURL, "/") + "/destinations#Milvus" + + bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") + if err != nil { + return nil, fmt.Errorf("error serializing request body: %w", err) + } + + debugBody := bytes.NewBuffer([]byte{}) + debugReader := io.TeeReader(bodyReader, debugBody) + + req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + req.Header.Set("Content-Type", reqContentType) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Request.Body = io.NopCloser(debugBody) + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.CreateDestinationMilvusResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(contentType, `application/json`): + var out *shared.DestinationResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { + return res, err + } + + res.DestinationResponse = out + } + case httpRes.StatusCode == 400: + fallthrough + case httpRes.StatusCode == 403: + } + + return res, nil +} + // CreateDestinationMongodb - Create a destination // Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination. func (s *destinations) CreateDestinationMongodb(ctx context.Context, request shared.DestinationMongodbCreateRequest) (*operations.CreateDestinationMongodbResponse, error) { @@ -1634,6 +1701,73 @@ func (s *destinations) CreateDestinationOracle(ctx context.Context, request shar return res, nil } +// CreateDestinationPinecone - Create a destination +// Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination. 
+func (s *destinations) CreateDestinationPinecone(ctx context.Context, request shared.DestinationPineconeCreateRequest) (*operations.CreateDestinationPineconeResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url := strings.TrimSuffix(baseURL, "/") + "/destinations#Pinecone" + + bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") + if err != nil { + return nil, fmt.Errorf("error serializing request body: %w", err) + } + + debugBody := bytes.NewBuffer([]byte{}) + debugReader := io.TeeReader(bodyReader, debugBody) + + req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + req.Header.Set("Content-Type", reqContentType) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Request.Body = io.NopCloser(debugBody) + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.CreateDestinationPineconeResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(contentType, `application/json`): + var out *shared.DestinationResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { + return res, err + } + + res.DestinationResponse = out + } + case httpRes.StatusCode == 400: + fallthrough + case httpRes.StatusCode == 403: + } + + return res, nil +} + // CreateDestinationPostgres - Create a destination // Creates a destination given a name, workspace id, and a json blob containing the configuration for the destination. 
func (s *destinations) CreateDestinationPostgres(ctx context.Context, request shared.DestinationPostgresCreateRequest) (*operations.CreateDestinationPostgresResponse, error) { @@ -3438,6 +3572,56 @@ func (s *destinations) DeleteDestinationLangchain(ctx context.Context, request o return res, nil } +// DeleteDestinationMilvus - Delete a Destination +func (s *destinations) DeleteDestinationMilvus(ctx context.Context, request operations.DeleteDestinationMilvusRequest) (*operations.DeleteDestinationMilvusResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Milvus", request, nil) + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "*/*") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.DeleteDestinationMilvusResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: + fallthrough + case httpRes.StatusCode == 403: + fallthrough + case httpRes.StatusCode == 404: + } + + return res, nil +} + // DeleteDestinationMongodb - Delete a Destination func (s *destinations) DeleteDestinationMongodb(ctx context.Context, request operations.DeleteDestinationMongodbRequest) (*operations.DeleteDestinationMongodbResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -3638,6 +3822,56 @@ func (s *destinations) DeleteDestinationOracle(ctx context.Context, request oper return res, nil } +// DeleteDestinationPinecone - Delete a Destination +func (s *destinations) DeleteDestinationPinecone(ctx context.Context, request operations.DeleteDestinationPineconeRequest) (*operations.DeleteDestinationPineconeResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pinecone", request, nil) + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "*/*") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error 
sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.DeleteDestinationPineconeResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: + fallthrough + case httpRes.StatusCode == 403: + fallthrough + case httpRes.StatusCode == 404: + } + + return res, nil +} + // DeleteDestinationPostgres - Delete a Destination func (s *destinations) DeleteDestinationPostgres(ctx context.Context, request operations.DeleteDestinationPostgresRequest) (*operations.DeleteDestinationPostgresResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -5398,6 +5632,64 @@ func (s *destinations) GetDestinationLangchain(ctx context.Context, request oper return res, nil } +// GetDestinationMilvus - Get Destination details +func (s *destinations) GetDestinationMilvus(ctx context.Context, request operations.GetDestinationMilvusRequest) (*operations.GetDestinationMilvusResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Milvus", request, nil) + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.GetDestinationMilvusResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(contentType, `application/json`): + var out *shared.DestinationResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { + return res, err + } + + res.DestinationResponse = out + } + case httpRes.StatusCode == 403: + fallthrough + case httpRes.StatusCode == 404: + } + + return res, nil +} + // GetDestinationMongodb - Get Destination details func (s *destinations) GetDestinationMongodb(ctx context.Context, request operations.GetDestinationMongodbRequest) (*operations.GetDestinationMongodbResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -5630,6 +5922,64 @@ func (s *destinations) GetDestinationOracle(ctx context.Context, request operati return res, nil } +// GetDestinationPinecone - Get Destination details +func (s *destinations) GetDestinationPinecone(ctx 
context.Context, request operations.GetDestinationPineconeRequest) (*operations.GetDestinationPineconeResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pinecone", request, nil) + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + req, err := http.NewRequestWithContext(ctx, "GET", url, nil) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.GetDestinationPineconeResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(contentType, `application/json`): + var out *shared.DestinationResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { + return res, err + } + + res.DestinationResponse = out + } + case httpRes.StatusCode == 403: + fallthrough + case httpRes.StatusCode == 404: + } + + return res, nil +} + // GetDestinationPostgres - Get Destination details func (s *destinations) GetDestinationPostgres(ctx context.Context, request operations.GetDestinationPostgresRequest) (*operations.GetDestinationPostgresResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -7682,6 +8032,67 @@ func (s *destinations) PutDestinationLangchain(ctx context.Context, request oper return res, nil } +// PutDestinationMilvus - Update a Destination fully +func (s *destinations) PutDestinationMilvus(ctx context.Context, request operations.PutDestinationMilvusRequest) (*operations.PutDestinationMilvusResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Milvus", request, nil) + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationMilvusPutRequest", "json") + if err != nil { + return nil, fmt.Errorf("error serializing request body: %w", err) + } + + debugBody := bytes.NewBuffer([]byte{}) + debugReader := io.TeeReader(bodyReader, debugBody) + + req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "*/*") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + req.Header.Set("Content-Type", reqContentType) + + client := s.sdkConfiguration.SecurityClient + + 
httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Request.Body = io.NopCloser(debugBody) + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.PutDestinationMilvusResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: + fallthrough + case httpRes.StatusCode == 403: + fallthrough + case httpRes.StatusCode == 404: + } + + return res, nil +} + // PutDestinationMongodb - Update a Destination fully func (s *destinations) PutDestinationMongodb(ctx context.Context, request operations.PutDestinationMongodbRequest) (*operations.PutDestinationMongodbResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -7926,6 +8337,67 @@ func (s *destinations) PutDestinationOracle(ctx context.Context, request operati return res, nil } +// PutDestinationPinecone - Update a Destination fully +func (s *destinations) PutDestinationPinecone(ctx context.Context, request operations.PutDestinationPineconeRequest) (*operations.PutDestinationPineconeResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url, err := utils.GenerateURL(ctx, baseURL, "/destinations/{destinationId}#Pinecone", request, nil) + if err != nil { + return nil, fmt.Errorf("error generating URL: %w", err) + } + + bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "DestinationPineconePutRequest", "json") + if err != nil { + return nil, fmt.Errorf("error serializing request body: %w", err) + } + + debugBody := bytes.NewBuffer([]byte{}) + debugReader := io.TeeReader(bodyReader, debugBody) + + req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "*/*") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + req.Header.Set("Content-Type", reqContentType) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Request.Body = io.NopCloser(debugBody) + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.PutDestinationPineconeResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: + fallthrough + case httpRes.StatusCode == 403: + fallthrough + case httpRes.StatusCode == 404: + } + + return res, nil +} + // PutDestinationPostgres - Update a Destination fully func (s *destinations) PutDestinationPostgres(ctx context.Context, request 
operations.PutDestinationPostgresRequest) (*operations.PutDestinationPostgresResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) diff --git a/internal/sdk/pkg/models/operations/createsourceopenweather.go b/internal/sdk/pkg/models/operations/createdestinationmilvus.go similarity index 56% rename from internal/sdk/pkg/models/operations/createsourceopenweather.go rename to internal/sdk/pkg/models/operations/createdestinationmilvus.go index 6d42bcb9b..a3f0ee427 100755 --- a/internal/sdk/pkg/models/operations/createsourceopenweather.go +++ b/internal/sdk/pkg/models/operations/createdestinationmilvus.go @@ -7,10 +7,10 @@ import ( "net/http" ) -type CreateSourceOpenweatherResponse struct { +type CreateDestinationMilvusResponse struct { ContentType string // Successful operation - SourceResponse *shared.SourceResponse - StatusCode int - RawResponse *http.Response + DestinationResponse *shared.DestinationResponse + StatusCode int + RawResponse *http.Response } diff --git a/internal/sdk/pkg/models/operations/createsourcepublicapis.go b/internal/sdk/pkg/models/operations/createdestinationpinecone.go similarity index 56% rename from internal/sdk/pkg/models/operations/createsourcepublicapis.go rename to internal/sdk/pkg/models/operations/createdestinationpinecone.go index 8ba4f01a5..4cfa6e3be 100755 --- a/internal/sdk/pkg/models/operations/createsourcepublicapis.go +++ b/internal/sdk/pkg/models/operations/createdestinationpinecone.go @@ -7,10 +7,10 @@ import ( "net/http" ) -type CreateSourcePublicApisResponse struct { +type CreateDestinationPineconeResponse struct { ContentType string // Successful operation - SourceResponse *shared.SourceResponse - StatusCode int - RawResponse *http.Response + DestinationResponse *shared.DestinationResponse + StatusCode int + RawResponse *http.Response } diff --git a/internal/sdk/pkg/models/operations/createsourcedatadog.go b/internal/sdk/pkg/models/operations/createsourcedatadog.go deleted file mode 100755 index ca03a95f5..000000000 --- a/internal/sdk/pkg/models/operations/createsourcedatadog.go +++ /dev/null @@ -1,16 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type CreateSourceDatadogResponse struct { - ContentType string - // Successful operation - SourceResponse *shared.SourceResponse - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/deletesourceopenweather.go b/internal/sdk/pkg/models/operations/deletedestinationmilvus.go similarity index 51% rename from internal/sdk/pkg/models/operations/deletesourceopenweather.go rename to internal/sdk/pkg/models/operations/deletedestinationmilvus.go index 7dda85879..d79cef4d5 100755 --- a/internal/sdk/pkg/models/operations/deletesourceopenweather.go +++ b/internal/sdk/pkg/models/operations/deletedestinationmilvus.go @@ -6,11 +6,11 @@ import ( "net/http" ) -type DeleteSourceOpenweatherRequest struct { - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` +type DeleteDestinationMilvusRequest struct { + DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"` } -type DeleteSourceOpenweatherResponse struct { +type DeleteDestinationMilvusResponse struct { ContentType string StatusCode int RawResponse *http.Response diff --git a/internal/sdk/pkg/models/operations/deletesourcepublicapis.go b/internal/sdk/pkg/models/operations/deletedestinationpinecone.go similarity index 51% rename from internal/sdk/pkg/models/operations/deletesourcepublicapis.go rename to internal/sdk/pkg/models/operations/deletedestinationpinecone.go index d04495d78..936ad92bf 100755 --- a/internal/sdk/pkg/models/operations/deletesourcepublicapis.go +++ b/internal/sdk/pkg/models/operations/deletedestinationpinecone.go @@ -6,11 +6,11 @@ import ( "net/http" ) -type DeleteSourcePublicApisRequest struct { - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` +type DeleteDestinationPineconeRequest struct { + DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"` } -type DeleteSourcePublicApisResponse struct { +type DeleteDestinationPineconeResponse struct { ContentType string StatusCode int RawResponse *http.Response diff --git a/internal/sdk/pkg/models/operations/deletesourcedatadog.go b/internal/sdk/pkg/models/operations/deletesourcedatadog.go deleted file mode 100755 index 9f74c54b6..000000000 --- a/internal/sdk/pkg/models/operations/deletesourcedatadog.go +++ /dev/null @@ -1,17 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package operations - -import ( - "net/http" -) - -type DeleteSourceDatadogRequest struct { - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type DeleteSourceDatadogResponse struct { - ContentType string - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/getdestinationmilvus.go b/internal/sdk/pkg/models/operations/getdestinationmilvus.go new file mode 100755 index 000000000..1796f623e --- /dev/null +++ b/internal/sdk/pkg/models/operations/getdestinationmilvus.go @@ -0,0 +1,20 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package operations + +import ( + "airbyte/internal/sdk/pkg/models/shared" + "net/http" +) + +type GetDestinationMilvusRequest struct { + DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"` +} + +type GetDestinationMilvusResponse struct { + ContentType string + // Get a Destination by the id in the path. 
+ DestinationResponse *shared.DestinationResponse + StatusCode int + RawResponse *http.Response +} diff --git a/internal/sdk/pkg/models/operations/getdestinationpinecone.go b/internal/sdk/pkg/models/operations/getdestinationpinecone.go new file mode 100755 index 000000000..a5b4e2cb4 --- /dev/null +++ b/internal/sdk/pkg/models/operations/getdestinationpinecone.go @@ -0,0 +1,20 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package operations + +import ( + "airbyte/internal/sdk/pkg/models/shared" + "net/http" +) + +type GetDestinationPineconeRequest struct { + DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"` +} + +type GetDestinationPineconeResponse struct { + ContentType string + // Get a Destination by the id in the path. + DestinationResponse *shared.DestinationResponse + StatusCode int + RawResponse *http.Response +} diff --git a/internal/sdk/pkg/models/operations/getsourcedatadog.go b/internal/sdk/pkg/models/operations/getsourcedatadog.go deleted file mode 100755 index 89e59ad6d..000000000 --- a/internal/sdk/pkg/models/operations/getsourcedatadog.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type GetSourceDatadogRequest struct { - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type GetSourceDatadogResponse struct { - ContentType string - // Get a Source by the id in the path. - SourceResponse *shared.SourceResponse - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/getsourceopenweather.go b/internal/sdk/pkg/models/operations/getsourceopenweather.go deleted file mode 100755 index c72a86691..000000000 --- a/internal/sdk/pkg/models/operations/getsourceopenweather.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type GetSourceOpenweatherRequest struct { - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type GetSourceOpenweatherResponse struct { - ContentType string - // Get a Source by the id in the path. - SourceResponse *shared.SourceResponse - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/getsourcepublicapis.go b/internal/sdk/pkg/models/operations/getsourcepublicapis.go deleted file mode 100755 index f24428866..000000000 --- a/internal/sdk/pkg/models/operations/getsourcepublicapis.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type GetSourcePublicApisRequest struct { - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type GetSourcePublicApisResponse struct { - ContentType string - // Get a Source by the id in the path. - SourceResponse *shared.SourceResponse - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/putdestinationmilvus.go b/internal/sdk/pkg/models/operations/putdestinationmilvus.go new file mode 100755 index 000000000..7c29dde1e --- /dev/null +++ b/internal/sdk/pkg/models/operations/putdestinationmilvus.go @@ -0,0 +1,19 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package operations + +import ( + "airbyte/internal/sdk/pkg/models/shared" + "net/http" +) + +type PutDestinationMilvusRequest struct { + DestinationMilvusPutRequest *shared.DestinationMilvusPutRequest `request:"mediaType=application/json"` + DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"` +} + +type PutDestinationMilvusResponse struct { + ContentType string + StatusCode int + RawResponse *http.Response +} diff --git a/internal/sdk/pkg/models/operations/putdestinationpinecone.go b/internal/sdk/pkg/models/operations/putdestinationpinecone.go new file mode 100755 index 000000000..0af975ad6 --- /dev/null +++ b/internal/sdk/pkg/models/operations/putdestinationpinecone.go @@ -0,0 +1,19 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package operations + +import ( + "airbyte/internal/sdk/pkg/models/shared" + "net/http" +) + +type PutDestinationPineconeRequest struct { + DestinationPineconePutRequest *shared.DestinationPineconePutRequest `request:"mediaType=application/json"` + DestinationID string `pathParam:"style=simple,explode=false,name=destinationId"` +} + +type PutDestinationPineconeResponse struct { + ContentType string + StatusCode int + RawResponse *http.Response +} diff --git a/internal/sdk/pkg/models/operations/putsourcedatadog.go b/internal/sdk/pkg/models/operations/putsourcedatadog.go deleted file mode 100755 index cb3d856f9..000000000 --- a/internal/sdk/pkg/models/operations/putsourcedatadog.go +++ /dev/null @@ -1,19 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type PutSourceDatadogRequest struct { - SourceDatadogPutRequest *shared.SourceDatadogPutRequest `request:"mediaType=application/json"` - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type PutSourceDatadogResponse struct { - ContentType string - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/putsourceopenweather.go b/internal/sdk/pkg/models/operations/putsourceopenweather.go deleted file mode 100755 index c52482172..000000000 --- a/internal/sdk/pkg/models/operations/putsourceopenweather.go +++ /dev/null @@ -1,19 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type PutSourceOpenweatherRequest struct { - SourceOpenweatherPutRequest *shared.SourceOpenweatherPutRequest `request:"mediaType=application/json"` - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type PutSourceOpenweatherResponse struct { - ContentType string - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/operations/putsourcepublicapis.go b/internal/sdk/pkg/models/operations/putsourcepublicapis.go deleted file mode 100755 index bf13e4ce0..000000000 --- a/internal/sdk/pkg/models/operations/putsourcepublicapis.go +++ /dev/null @@ -1,19 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
- -package operations - -import ( - "airbyte/internal/sdk/pkg/models/shared" - "net/http" -) - -type PutSourcePublicApisRequest struct { - SourcePublicApisPutRequest *shared.SourcePublicApisPutRequest `request:"mediaType=application/json"` - SourceID string `pathParam:"style=simple,explode=false,name=sourceId"` -} - -type PutSourcePublicApisResponse struct { - ContentType string - StatusCode int - RawResponse *http.Response -} diff --git a/internal/sdk/pkg/models/shared/connectionresponseroottypeforconnectionresponse.go b/internal/sdk/pkg/models/shared/connectionresponseroottypeforconnectionresponse.go deleted file mode 100755 index f2dbf47e4..000000000 --- a/internal/sdk/pkg/models/shared/connectionresponseroottypeforconnectionresponse.go +++ /dev/null @@ -1,24 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -// ConnectionResponseRootTypeForConnectionResponse - Provides details of a single connection. -type ConnectionResponseRootTypeForConnectionResponse struct { - // A list of configured stream options for a connection. - Configurations StreamConfigurations `json:"configurations"` - ConnectionID string `json:"connectionId"` - DataResidency GeographyEnum `json:"dataResidency"` - DestinationID string `json:"destinationId"` - Name string `json:"name"` - // Define the location where the data will be stored in the destination - NamespaceDefinition *NamespaceDefinitionEnum `json:"namespaceDefinition,omitempty"` - NamespaceFormat *string `json:"namespaceFormat,omitempty"` - // Set how Airbyte handles syncs when it detects a non-breaking schema change in the source - NonBreakingSchemaUpdatesBehavior *NonBreakingSchemaUpdatesBehaviorEnum `json:"nonBreakingSchemaUpdatesBehavior,omitempty"` - Prefix *string `json:"prefix,omitempty"` - // schedule for when the the connection should run, per the schedule type - Schedule ConnectionScheduleResponse `json:"schedule"` - SourceID string `json:"sourceId"` - Status ConnectionStatusEnum `json:"status"` - WorkspaceID string `json:"workspaceId"` -} diff --git a/internal/sdk/pkg/models/shared/connectionsresponse.go b/internal/sdk/pkg/models/shared/connectionsresponse.go index 61bc21ad1..b5fe04eec 100755 --- a/internal/sdk/pkg/models/shared/connectionsresponse.go +++ b/internal/sdk/pkg/models/shared/connectionsresponse.go @@ -2,7 +2,6 @@ package shared -// ConnectionsResponse - Successful operation type ConnectionsResponse struct { Data []ConnectionResponse `json:"data"` Next *string `json:"next,omitempty"` diff --git a/internal/sdk/pkg/models/shared/destinationazureblobstorage.go b/internal/sdk/pkg/models/shared/destinationazureblobstorage.go index 35b9bef68..6411e49df 100755 --- a/internal/sdk/pkg/models/shared/destinationazureblobstorage.go +++ b/internal/sdk/pkg/models/shared/destinationazureblobstorage.go @@ -156,21 +156,21 @@ func CreateDestinationAzureBlobStorageOutputFormatDestinationAzureBlobStorageOut func (u *DestinationAzureBlobStorageOutputFormat) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues := new(DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues) + destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues); err == nil { - 
u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues = destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues - u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues + if err := d.Decode(&destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { + u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON + u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON return nil } - destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON) + destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues := new(DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { - u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON - u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON + if err := d.Decode(&destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues); err == nil { + u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues = destinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues + u.Type = DestinationAzureBlobStorageOutputFormatTypeDestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues return nil } @@ -178,14 +178,14 @@ func (u *DestinationAzureBlobStorageOutputFormat) UnmarshalJSON(data []byte) err } func (u DestinationAzureBlobStorageOutputFormat) MarshalJSON() ([]byte, error) { - if u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues != nil { - return json.Marshal(u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues) - } - if u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON != nil { return json.Marshal(u.DestinationAzureBlobStorageOutputFormatJSONLinesNewlineDelimitedJSON) } + if u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues != nil { + return json.Marshal(u.DestinationAzureBlobStorageOutputFormatCSVCommaSeparatedValues) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go b/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go index dc1e507c5..8a55073c0 100755 --- a/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go +++ b/internal/sdk/pkg/models/shared/destinationazureblobstorageupdate.go @@ -132,21 +132,21 @@ func CreateDestinationAzureBlobStorageUpdateOutputFormatDestinationAzureBlobStor func (u *DestinationAzureBlobStorageUpdateOutputFormat) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues := new(DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues) + destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues); err == nil { - u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues = 
destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues - u.Type = DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues + if err := d.Decode(&destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { + u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON + u.Type = DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON return nil } - destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON) + destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues := new(DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { - u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON - u.Type = DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON + if err := d.Decode(&destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues); err == nil { + u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues = destinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues + u.Type = DestinationAzureBlobStorageUpdateOutputFormatTypeDestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues return nil } @@ -154,14 +154,14 @@ func (u *DestinationAzureBlobStorageUpdateOutputFormat) UnmarshalJSON(data []byt } func (u DestinationAzureBlobStorageUpdateOutputFormat) MarshalJSON() ([]byte, error) { - if u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues != nil { - return json.Marshal(u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues) - } - if u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil { return json.Marshal(u.DestinationAzureBlobStorageUpdateOutputFormatJSONLinesNewlineDelimitedJSON) } + if u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues != nil { + return json.Marshal(u.DestinationAzureBlobStorageUpdateOutputFormatCSVCommaSeparatedValues) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationbigquery.go b/internal/sdk/pkg/models/shared/destinationbigquery.go index bd7e06be8..06e2bf662 100755 --- a/internal/sdk/pkg/models/shared/destinationbigquery.go +++ b/internal/sdk/pkg/models/shared/destinationbigquery.go @@ -458,10 +458,8 @@ type DestinationBigquery struct { LoadingMethod *DestinationBigqueryLoadingMethod `json:"loading_method,omitempty"` // The GCP project ID for the project containing the target BigQuery dataset. Read more here. ProjectID string `json:"project_id"` - // (Early Access) The dataset to write raw tables into + // The dataset to write raw tables into RawDataDataset *string `json:"raw_data_dataset,omitempty"` // Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. 
Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. TransformationPriority *DestinationBigqueryTransformationQueryRunType `json:"transformation_priority,omitempty"` - // (Early Access) Use Destinations V2. - Use1s1tFormat *bool `json:"use_1s1t_format,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go b/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go index abe1a3213..ab98a64be 100755 --- a/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go +++ b/internal/sdk/pkg/models/shared/destinationbigqueryupdate.go @@ -433,10 +433,8 @@ type DestinationBigqueryUpdate struct { LoadingMethod *DestinationBigqueryUpdateLoadingMethod `json:"loading_method,omitempty"` // The GCP project ID for the project containing the target BigQuery dataset. Read more here. ProjectID string `json:"project_id"` - // (Early Access) The dataset to write raw tables into + // The dataset to write raw tables into RawDataDataset *string `json:"raw_data_dataset,omitempty"` // Interactive run type means that the query is executed as soon as possible, and these queries count towards concurrent rate limit and daily limit. Read more about interactive run type here. Batch queries are queued and started as soon as idle resources are available in the BigQuery shared resource pool, which usually occurs within a few minutes. Batch queries don’t count towards your concurrent rate limit. Read more about batch queries here. The default "interactive" value is used if not set explicitly. TransformationPriority *DestinationBigqueryUpdateTransformationQueryRunType `json:"transformation_priority,omitempty"` - // (Early Access) Use Destinations V2. 
- Use1s1tFormat *bool `json:"use_1s1t_format,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/destinationdatabricks.go b/internal/sdk/pkg/models/shared/destinationdatabricks.go index 0aa9f85c5..e8ae885a8 100755 --- a/internal/sdk/pkg/models/shared/destinationdatabricks.go +++ b/internal/sdk/pkg/models/shared/destinationdatabricks.go @@ -271,21 +271,21 @@ func (u *DestinationDatabricksDataSource) UnmarshalJSON(data []byte) error { return nil } - destinationDatabricksDataSourceAmazonS3 := new(DestinationDatabricksDataSourceAmazonS3) + destinationDatabricksDataSourceAzureBlobStorage := new(DestinationDatabricksDataSourceAzureBlobStorage) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationDatabricksDataSourceAmazonS3); err == nil { - u.DestinationDatabricksDataSourceAmazonS3 = destinationDatabricksDataSourceAmazonS3 - u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAmazonS3 + if err := d.Decode(&destinationDatabricksDataSourceAzureBlobStorage); err == nil { + u.DestinationDatabricksDataSourceAzureBlobStorage = destinationDatabricksDataSourceAzureBlobStorage + u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAzureBlobStorage return nil } - destinationDatabricksDataSourceAzureBlobStorage := new(DestinationDatabricksDataSourceAzureBlobStorage) + destinationDatabricksDataSourceAmazonS3 := new(DestinationDatabricksDataSourceAmazonS3) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationDatabricksDataSourceAzureBlobStorage); err == nil { - u.DestinationDatabricksDataSourceAzureBlobStorage = destinationDatabricksDataSourceAzureBlobStorage - u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAzureBlobStorage + if err := d.Decode(&destinationDatabricksDataSourceAmazonS3); err == nil { + u.DestinationDatabricksDataSourceAmazonS3 = destinationDatabricksDataSourceAmazonS3 + u.Type = DestinationDatabricksDataSourceTypeDestinationDatabricksDataSourceAmazonS3 return nil } @@ -297,14 +297,14 @@ func (u DestinationDatabricksDataSource) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationDatabricksDataSourceRecommendedManagedTables) } - if u.DestinationDatabricksDataSourceAmazonS3 != nil { - return json.Marshal(u.DestinationDatabricksDataSourceAmazonS3) - } - if u.DestinationDatabricksDataSourceAzureBlobStorage != nil { return json.Marshal(u.DestinationDatabricksDataSourceAzureBlobStorage) } + if u.DestinationDatabricksDataSourceAmazonS3 != nil { + return json.Marshal(u.DestinationDatabricksDataSourceAmazonS3) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go b/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go index d2f243856..f791c2b54 100755 --- a/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go +++ b/internal/sdk/pkg/models/shared/destinationdatabricksupdate.go @@ -271,21 +271,21 @@ func (u *DestinationDatabricksUpdateDataSource) UnmarshalJSON(data []byte) error return nil } - destinationDatabricksUpdateDataSourceAmazonS3 := new(DestinationDatabricksUpdateDataSourceAmazonS3) + destinationDatabricksUpdateDataSourceAzureBlobStorage := new(DestinationDatabricksUpdateDataSourceAzureBlobStorage) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationDatabricksUpdateDataSourceAmazonS3); err == nil { - u.DestinationDatabricksUpdateDataSourceAmazonS3 = destinationDatabricksUpdateDataSourceAmazonS3 - 
u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAmazonS3 + if err := d.Decode(&destinationDatabricksUpdateDataSourceAzureBlobStorage); err == nil { + u.DestinationDatabricksUpdateDataSourceAzureBlobStorage = destinationDatabricksUpdateDataSourceAzureBlobStorage + u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAzureBlobStorage return nil } - destinationDatabricksUpdateDataSourceAzureBlobStorage := new(DestinationDatabricksUpdateDataSourceAzureBlobStorage) + destinationDatabricksUpdateDataSourceAmazonS3 := new(DestinationDatabricksUpdateDataSourceAmazonS3) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationDatabricksUpdateDataSourceAzureBlobStorage); err == nil { - u.DestinationDatabricksUpdateDataSourceAzureBlobStorage = destinationDatabricksUpdateDataSourceAzureBlobStorage - u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAzureBlobStorage + if err := d.Decode(&destinationDatabricksUpdateDataSourceAmazonS3); err == nil { + u.DestinationDatabricksUpdateDataSourceAmazonS3 = destinationDatabricksUpdateDataSourceAmazonS3 + u.Type = DestinationDatabricksUpdateDataSourceTypeDestinationDatabricksUpdateDataSourceAmazonS3 return nil } @@ -297,14 +297,14 @@ func (u DestinationDatabricksUpdateDataSource) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationDatabricksUpdateDataSourceRecommendedManagedTables) } - if u.DestinationDatabricksUpdateDataSourceAmazonS3 != nil { - return json.Marshal(u.DestinationDatabricksUpdateDataSourceAmazonS3) - } - if u.DestinationDatabricksUpdateDataSourceAzureBlobStorage != nil { return json.Marshal(u.DestinationDatabricksUpdateDataSourceAzureBlobStorage) } + if u.DestinationDatabricksUpdateDataSourceAmazonS3 != nil { + return json.Marshal(u.DestinationDatabricksUpdateDataSourceAmazonS3) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationgcs.go b/internal/sdk/pkg/models/shared/destinationgcs.go index 4a02a48d9..e0a586d4b 100755 --- a/internal/sdk/pkg/models/shared/destinationgcs.go +++ b/internal/sdk/pkg/models/shared/destinationgcs.go @@ -808,21 +808,30 @@ func (u *DestinationGcsOutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON return nil } - destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate) + destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { - u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate - u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate + if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { + u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 + u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 return nil } - destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2) + 
destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { - u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 - u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 + if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { + u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy + u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy + return nil + } + + destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { + u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate + u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate return nil } @@ -844,15 +853,6 @@ func (u *DestinationGcsOutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON return nil } - destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { - u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy - u.Type = DestinationGcsOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy - return nil - } - return errors.New("could not unmarshal into supported union types") } @@ -861,14 +861,18 @@ func (u DestinationGcsOutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecNoCompression) } - if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate != nil { - return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate) - } - if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil { return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecBzip2) } + if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy != nil { + return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy) + } + + if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate != nil { + return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecDeflate) + } + if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz != nil { return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecXz) } @@ -877,10 +881,6 @@ func (u DestinationGcsOutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecZstandard) } 
- if u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy != nil { - return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvroCompressionCodecSnappy) - } - return nil, nil } @@ -981,21 +981,21 @@ func (u *DestinationGcsOutputFormat) UnmarshalJSON(data []byte) error { return nil } - destinationGcsOutputFormatCSVCommaSeparatedValues := new(DestinationGcsOutputFormatCSVCommaSeparatedValues) + destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsOutputFormatCSVCommaSeparatedValues); err == nil { - u.DestinationGcsOutputFormatCSVCommaSeparatedValues = destinationGcsOutputFormatCSVCommaSeparatedValues - u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatCSVCommaSeparatedValues + if err := d.Decode(&destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { + u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON = destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON + u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON return nil } - destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON) + destinationGcsOutputFormatCSVCommaSeparatedValues := new(DestinationGcsOutputFormatCSVCommaSeparatedValues) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { - u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON = destinationGcsOutputFormatJSONLinesNewlineDelimitedJSON - u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON + if err := d.Decode(&destinationGcsOutputFormatCSVCommaSeparatedValues); err == nil { + u.DestinationGcsOutputFormatCSVCommaSeparatedValues = destinationGcsOutputFormatCSVCommaSeparatedValues + u.Type = DestinationGcsOutputFormatTypeDestinationGcsOutputFormatCSVCommaSeparatedValues return nil } @@ -1016,14 +1016,14 @@ func (u DestinationGcsOutputFormat) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationGcsOutputFormatAvroApacheAvro) } - if u.DestinationGcsOutputFormatCSVCommaSeparatedValues != nil { - return json.Marshal(u.DestinationGcsOutputFormatCSVCommaSeparatedValues) - } - if u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON != nil { return json.Marshal(u.DestinationGcsOutputFormatJSONLinesNewlineDelimitedJSON) } + if u.DestinationGcsOutputFormatCSVCommaSeparatedValues != nil { + return json.Marshal(u.DestinationGcsOutputFormatCSVCommaSeparatedValues) + } + if u.DestinationGcsOutputFormatParquetColumnarStorage != nil { return json.Marshal(u.DestinationGcsOutputFormatParquetColumnarStorage) } diff --git a/internal/sdk/pkg/models/shared/destinationgcsupdate.go b/internal/sdk/pkg/models/shared/destinationgcsupdate.go index 4ef27841c..e7b48050c 100755 --- a/internal/sdk/pkg/models/shared/destinationgcsupdate.go +++ b/internal/sdk/pkg/models/shared/destinationgcsupdate.go @@ -784,21 +784,30 @@ func (u *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec) Unmarsh return nil } - destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) + destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) d = 
json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { - u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate - u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate + if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { + u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 + u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 return nil } - destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) + destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { - u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 - u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 + if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { + u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy + u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy + return nil + } + + destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { + u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate + u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate return nil } @@ -820,15 +829,6 @@ func (u *DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec) Unmarsh return nil } - destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { - u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy - u.Type = DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy - return nil - } - return errors.New("could not unmarshal into supported union types") } @@ -837,14 +837,18 @@ func (u 
DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec) MarshalJ return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression) } - if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil { - return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) - } - if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil { return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) } + if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil { + return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) + } + + if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil { + return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) + } + if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil { return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecXz) } @@ -853,10 +857,6 @@ func (u DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodec) MarshalJ return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecZstandard) } - if u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil { - return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) - } - return nil, nil } @@ -957,21 +957,21 @@ func (u *DestinationGcsUpdateOutputFormat) UnmarshalJSON(data []byte) error { return nil } - destinationGcsUpdateOutputFormatCSVCommaSeparatedValues := new(DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues) + destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsUpdateOutputFormatCSVCommaSeparatedValues); err == nil { - u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues = destinationGcsUpdateOutputFormatCSVCommaSeparatedValues - u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValues + if err := d.Decode(&destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { + u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON + u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON return nil } - destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON := new(DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON) + destinationGcsUpdateOutputFormatCSVCommaSeparatedValues := new(DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON); err == nil { - u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON = destinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON - u.Type = DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON + if err := d.Decode(&destinationGcsUpdateOutputFormatCSVCommaSeparatedValues); err == nil { + u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues = destinationGcsUpdateOutputFormatCSVCommaSeparatedValues + u.Type = 
DestinationGcsUpdateOutputFormatTypeDestinationGcsUpdateOutputFormatCSVCommaSeparatedValues return nil } @@ -992,14 +992,14 @@ func (u DestinationGcsUpdateOutputFormat) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationGcsUpdateOutputFormatAvroApacheAvro) } - if u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues != nil { - return json.Marshal(u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues) - } - if u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON != nil { return json.Marshal(u.DestinationGcsUpdateOutputFormatJSONLinesNewlineDelimitedJSON) } + if u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues != nil { + return json.Marshal(u.DestinationGcsUpdateOutputFormatCSVCommaSeparatedValues) + } + if u.DestinationGcsUpdateOutputFormatParquetColumnarStorage != nil { return json.Marshal(u.DestinationGcsUpdateOutputFormatParquetColumnarStorage) } diff --git a/internal/sdk/pkg/models/shared/destinationlangchain.go b/internal/sdk/pkg/models/shared/destinationlangchain.go index 667f66ccd..8addc3858 100755 --- a/internal/sdk/pkg/models/shared/destinationlangchain.go +++ b/internal/sdk/pkg/models/shared/destinationlangchain.go @@ -127,21 +127,21 @@ func CreateDestinationLangchainEmbeddingDestinationLangchainEmbeddingFake(destin func (u *DestinationLangchainEmbedding) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationLangchainEmbeddingOpenAI := new(DestinationLangchainEmbeddingOpenAI) + destinationLangchainEmbeddingFake := new(DestinationLangchainEmbeddingFake) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationLangchainEmbeddingOpenAI); err == nil { - u.DestinationLangchainEmbeddingOpenAI = destinationLangchainEmbeddingOpenAI - u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingOpenAI + if err := d.Decode(&destinationLangchainEmbeddingFake); err == nil { + u.DestinationLangchainEmbeddingFake = destinationLangchainEmbeddingFake + u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingFake return nil } - destinationLangchainEmbeddingFake := new(DestinationLangchainEmbeddingFake) + destinationLangchainEmbeddingOpenAI := new(DestinationLangchainEmbeddingOpenAI) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationLangchainEmbeddingFake); err == nil { - u.DestinationLangchainEmbeddingFake = destinationLangchainEmbeddingFake - u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingFake + if err := d.Decode(&destinationLangchainEmbeddingOpenAI); err == nil { + u.DestinationLangchainEmbeddingOpenAI = destinationLangchainEmbeddingOpenAI + u.Type = DestinationLangchainEmbeddingTypeDestinationLangchainEmbeddingOpenAI return nil } @@ -149,14 +149,14 @@ func (u *DestinationLangchainEmbedding) UnmarshalJSON(data []byte) error { } func (u DestinationLangchainEmbedding) MarshalJSON() ([]byte, error) { - if u.DestinationLangchainEmbeddingOpenAI != nil { - return json.Marshal(u.DestinationLangchainEmbeddingOpenAI) - } - if u.DestinationLangchainEmbeddingFake != nil { return json.Marshal(u.DestinationLangchainEmbeddingFake) } + if u.DestinationLangchainEmbeddingOpenAI != nil { + return json.Marshal(u.DestinationLangchainEmbeddingOpenAI) + } + return nil, nil } @@ -304,15 +304,6 @@ func CreateDestinationLangchainIndexingDestinationLangchainIndexingChromaLocalPe func (u *DestinationLangchainIndexing) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationLangchainIndexingPinecone := 
new(DestinationLangchainIndexingPinecone) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationLangchainIndexingPinecone); err == nil { - u.DestinationLangchainIndexingPinecone = destinationLangchainIndexingPinecone - u.Type = DestinationLangchainIndexingTypeDestinationLangchainIndexingPinecone - return nil - } - destinationLangchainIndexingDocArrayHnswSearch := new(DestinationLangchainIndexingDocArrayHnswSearch) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() @@ -331,14 +322,19 @@ func (u *DestinationLangchainIndexing) UnmarshalJSON(data []byte) error { return nil } + destinationLangchainIndexingPinecone := new(DestinationLangchainIndexingPinecone) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationLangchainIndexingPinecone); err == nil { + u.DestinationLangchainIndexingPinecone = destinationLangchainIndexingPinecone + u.Type = DestinationLangchainIndexingTypeDestinationLangchainIndexingPinecone + return nil + } + return errors.New("could not unmarshal into supported union types") } func (u DestinationLangchainIndexing) MarshalJSON() ([]byte, error) { - if u.DestinationLangchainIndexingPinecone != nil { - return json.Marshal(u.DestinationLangchainIndexingPinecone) - } - if u.DestinationLangchainIndexingDocArrayHnswSearch != nil { return json.Marshal(u.DestinationLangchainIndexingDocArrayHnswSearch) } @@ -347,6 +343,10 @@ func (u DestinationLangchainIndexing) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationLangchainIndexingChromaLocalPersistance) } + if u.DestinationLangchainIndexingPinecone != nil { + return json.Marshal(u.DestinationLangchainIndexingPinecone) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationlangchainupdate.go b/internal/sdk/pkg/models/shared/destinationlangchainupdate.go index d3b7bfa8e..d1c96f7bc 100755 --- a/internal/sdk/pkg/models/shared/destinationlangchainupdate.go +++ b/internal/sdk/pkg/models/shared/destinationlangchainupdate.go @@ -103,21 +103,21 @@ func CreateDestinationLangchainUpdateEmbeddingDestinationLangchainUpdateEmbeddin func (u *DestinationLangchainUpdateEmbedding) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationLangchainUpdateEmbeddingOpenAI := new(DestinationLangchainUpdateEmbeddingOpenAI) + destinationLangchainUpdateEmbeddingFake := new(DestinationLangchainUpdateEmbeddingFake) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationLangchainUpdateEmbeddingOpenAI); err == nil { - u.DestinationLangchainUpdateEmbeddingOpenAI = destinationLangchainUpdateEmbeddingOpenAI - u.Type = DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingOpenAI + if err := d.Decode(&destinationLangchainUpdateEmbeddingFake); err == nil { + u.DestinationLangchainUpdateEmbeddingFake = destinationLangchainUpdateEmbeddingFake + u.Type = DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingFake return nil } - destinationLangchainUpdateEmbeddingFake := new(DestinationLangchainUpdateEmbeddingFake) + destinationLangchainUpdateEmbeddingOpenAI := new(DestinationLangchainUpdateEmbeddingOpenAI) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationLangchainUpdateEmbeddingFake); err == nil { - u.DestinationLangchainUpdateEmbeddingFake = destinationLangchainUpdateEmbeddingFake - u.Type = DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingFake + 
if err := d.Decode(&destinationLangchainUpdateEmbeddingOpenAI); err == nil { + u.DestinationLangchainUpdateEmbeddingOpenAI = destinationLangchainUpdateEmbeddingOpenAI + u.Type = DestinationLangchainUpdateEmbeddingTypeDestinationLangchainUpdateEmbeddingOpenAI return nil } @@ -125,14 +125,14 @@ func (u *DestinationLangchainUpdateEmbedding) UnmarshalJSON(data []byte) error { } func (u DestinationLangchainUpdateEmbedding) MarshalJSON() ([]byte, error) { - if u.DestinationLangchainUpdateEmbeddingOpenAI != nil { - return json.Marshal(u.DestinationLangchainUpdateEmbeddingOpenAI) - } - if u.DestinationLangchainUpdateEmbeddingFake != nil { return json.Marshal(u.DestinationLangchainUpdateEmbeddingFake) } + if u.DestinationLangchainUpdateEmbeddingOpenAI != nil { + return json.Marshal(u.DestinationLangchainUpdateEmbeddingOpenAI) + } + return nil, nil } @@ -280,15 +280,6 @@ func CreateDestinationLangchainUpdateIndexingDestinationLangchainUpdateIndexingC func (u *DestinationLangchainUpdateIndexing) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationLangchainUpdateIndexingPinecone := new(DestinationLangchainUpdateIndexingPinecone) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationLangchainUpdateIndexingPinecone); err == nil { - u.DestinationLangchainUpdateIndexingPinecone = destinationLangchainUpdateIndexingPinecone - u.Type = DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingPinecone - return nil - } - destinationLangchainUpdateIndexingDocArrayHnswSearch := new(DestinationLangchainUpdateIndexingDocArrayHnswSearch) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() @@ -307,14 +298,19 @@ func (u *DestinationLangchainUpdateIndexing) UnmarshalJSON(data []byte) error { return nil } + destinationLangchainUpdateIndexingPinecone := new(DestinationLangchainUpdateIndexingPinecone) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationLangchainUpdateIndexingPinecone); err == nil { + u.DestinationLangchainUpdateIndexingPinecone = destinationLangchainUpdateIndexingPinecone + u.Type = DestinationLangchainUpdateIndexingTypeDestinationLangchainUpdateIndexingPinecone + return nil + } + return errors.New("could not unmarshal into supported union types") } func (u DestinationLangchainUpdateIndexing) MarshalJSON() ([]byte, error) { - if u.DestinationLangchainUpdateIndexingPinecone != nil { - return json.Marshal(u.DestinationLangchainUpdateIndexingPinecone) - } - if u.DestinationLangchainUpdateIndexingDocArrayHnswSearch != nil { return json.Marshal(u.DestinationLangchainUpdateIndexingDocArrayHnswSearch) } @@ -323,6 +319,10 @@ func (u DestinationLangchainUpdateIndexing) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationLangchainUpdateIndexingChromaLocalPersistance) } + if u.DestinationLangchainUpdateIndexingPinecone != nil { + return json.Marshal(u.DestinationLangchainUpdateIndexingPinecone) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationmilvus.go b/internal/sdk/pkg/models/shared/destinationmilvus.go new file mode 100755 index 000000000..a664a9212 --- /dev/null +++ b/internal/sdk/pkg/models/shared/destinationmilvus.go @@ -0,0 +1,493 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package shared + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" +) + +type DestinationMilvusMilvus string + +const ( + DestinationMilvusMilvusMilvus DestinationMilvusMilvus = "milvus" +) + +func (e DestinationMilvusMilvus) ToPointer() *DestinationMilvusMilvus { + return &e +} + +func (e *DestinationMilvusMilvus) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "milvus": + *e = DestinationMilvusMilvus(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusMilvus: %v", v) + } +} + +type DestinationMilvusEmbeddingFromFieldMode string + +const ( + DestinationMilvusEmbeddingFromFieldModeFromField DestinationMilvusEmbeddingFromFieldMode = "from_field" +) + +func (e DestinationMilvusEmbeddingFromFieldMode) ToPointer() *DestinationMilvusEmbeddingFromFieldMode { + return &e +} + +func (e *DestinationMilvusEmbeddingFromFieldMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "from_field": + *e = DestinationMilvusEmbeddingFromFieldMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusEmbeddingFromFieldMode: %v", v) + } +} + +// DestinationMilvusEmbeddingFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. +type DestinationMilvusEmbeddingFromField struct { + // The number of dimensions the embedding model is generating + Dimensions int64 `json:"dimensions"` + // Name of the field in the record that contains the embedding + FieldName string `json:"field_name"` + Mode *DestinationMilvusEmbeddingFromFieldMode `json:"mode,omitempty"` +} + +type DestinationMilvusEmbeddingFakeMode string + +const ( + DestinationMilvusEmbeddingFakeModeFake DestinationMilvusEmbeddingFakeMode = "fake" +) + +func (e DestinationMilvusEmbeddingFakeMode) ToPointer() *DestinationMilvusEmbeddingFakeMode { + return &e +} + +func (e *DestinationMilvusEmbeddingFakeMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "fake": + *e = DestinationMilvusEmbeddingFakeMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusEmbeddingFakeMode: %v", v) + } +} + +// DestinationMilvusEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. +type DestinationMilvusEmbeddingFake struct { + Mode *DestinationMilvusEmbeddingFakeMode `json:"mode,omitempty"` +} + +type DestinationMilvusEmbeddingCohereMode string + +const ( + DestinationMilvusEmbeddingCohereModeCohere DestinationMilvusEmbeddingCohereMode = "cohere" +) + +func (e DestinationMilvusEmbeddingCohereMode) ToPointer() *DestinationMilvusEmbeddingCohereMode { + return &e +} + +func (e *DestinationMilvusEmbeddingCohereMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "cohere": + *e = DestinationMilvusEmbeddingCohereMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusEmbeddingCohereMode: %v", v) + } +} + +// DestinationMilvusEmbeddingCohere - Use the Cohere API to embed text. 
+type DestinationMilvusEmbeddingCohere struct { + CohereKey string `json:"cohere_key"` + Mode *DestinationMilvusEmbeddingCohereMode `json:"mode,omitempty"` +} + +type DestinationMilvusEmbeddingOpenAIMode string + +const ( + DestinationMilvusEmbeddingOpenAIModeOpenai DestinationMilvusEmbeddingOpenAIMode = "openai" +) + +func (e DestinationMilvusEmbeddingOpenAIMode) ToPointer() *DestinationMilvusEmbeddingOpenAIMode { + return &e +} + +func (e *DestinationMilvusEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "openai": + *e = DestinationMilvusEmbeddingOpenAIMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusEmbeddingOpenAIMode: %v", v) + } +} + +// DestinationMilvusEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. +type DestinationMilvusEmbeddingOpenAI struct { + Mode *DestinationMilvusEmbeddingOpenAIMode `json:"mode,omitempty"` + OpenaiKey string `json:"openai_key"` +} + +type DestinationMilvusEmbeddingType string + +const ( + DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingOpenAI DestinationMilvusEmbeddingType = "destination-milvus_Embedding_OpenAI" + DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingCohere DestinationMilvusEmbeddingType = "destination-milvus_Embedding_Cohere" + DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFake DestinationMilvusEmbeddingType = "destination-milvus_Embedding_Fake" + DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFromField DestinationMilvusEmbeddingType = "destination-milvus_Embedding_From Field" +) + +type DestinationMilvusEmbedding struct { + DestinationMilvusEmbeddingOpenAI *DestinationMilvusEmbeddingOpenAI + DestinationMilvusEmbeddingCohere *DestinationMilvusEmbeddingCohere + DestinationMilvusEmbeddingFake *DestinationMilvusEmbeddingFake + DestinationMilvusEmbeddingFromField *DestinationMilvusEmbeddingFromField + + Type DestinationMilvusEmbeddingType +} + +func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingOpenAI(destinationMilvusEmbeddingOpenAI DestinationMilvusEmbeddingOpenAI) DestinationMilvusEmbedding { + typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingOpenAI + + return DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingOpenAI: &destinationMilvusEmbeddingOpenAI, + Type: typ, + } +} + +func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingCohere(destinationMilvusEmbeddingCohere DestinationMilvusEmbeddingCohere) DestinationMilvusEmbedding { + typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingCohere + + return DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingCohere: &destinationMilvusEmbeddingCohere, + Type: typ, + } +} + +func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingFake(destinationMilvusEmbeddingFake DestinationMilvusEmbeddingFake) DestinationMilvusEmbedding { + typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFake + + return DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingFake: &destinationMilvusEmbeddingFake, + Type: typ, + } +} + +func CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingFromField(destinationMilvusEmbeddingFromField DestinationMilvusEmbeddingFromField) DestinationMilvusEmbedding { + typ := DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFromField + + return DestinationMilvusEmbedding{ + DestinationMilvusEmbeddingFromField: 
&destinationMilvusEmbeddingFromField, + Type: typ, + } +} + +func (u *DestinationMilvusEmbedding) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + destinationMilvusEmbeddingFake := new(DestinationMilvusEmbeddingFake) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusEmbeddingFake); err == nil { + u.DestinationMilvusEmbeddingFake = destinationMilvusEmbeddingFake + u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFake + return nil + } + + destinationMilvusEmbeddingOpenAI := new(DestinationMilvusEmbeddingOpenAI) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusEmbeddingOpenAI); err == nil { + u.DestinationMilvusEmbeddingOpenAI = destinationMilvusEmbeddingOpenAI + u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingOpenAI + return nil + } + + destinationMilvusEmbeddingCohere := new(DestinationMilvusEmbeddingCohere) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusEmbeddingCohere); err == nil { + u.DestinationMilvusEmbeddingCohere = destinationMilvusEmbeddingCohere + u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingCohere + return nil + } + + destinationMilvusEmbeddingFromField := new(DestinationMilvusEmbeddingFromField) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusEmbeddingFromField); err == nil { + u.DestinationMilvusEmbeddingFromField = destinationMilvusEmbeddingFromField + u.Type = DestinationMilvusEmbeddingTypeDestinationMilvusEmbeddingFromField + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u DestinationMilvusEmbedding) MarshalJSON() ([]byte, error) { + if u.DestinationMilvusEmbeddingFake != nil { + return json.Marshal(u.DestinationMilvusEmbeddingFake) + } + + if u.DestinationMilvusEmbeddingOpenAI != nil { + return json.Marshal(u.DestinationMilvusEmbeddingOpenAI) + } + + if u.DestinationMilvusEmbeddingCohere != nil { + return json.Marshal(u.DestinationMilvusEmbeddingCohere) + } + + if u.DestinationMilvusEmbeddingFromField != nil { + return json.Marshal(u.DestinationMilvusEmbeddingFromField) + } + + return nil, nil +} + +type DestinationMilvusIndexingAuthenticationNoAuthMode string + +const ( + DestinationMilvusIndexingAuthenticationNoAuthModeNoAuth DestinationMilvusIndexingAuthenticationNoAuthMode = "no_auth" +) + +func (e DestinationMilvusIndexingAuthenticationNoAuthMode) ToPointer() *DestinationMilvusIndexingAuthenticationNoAuthMode { + return &e +} + +func (e *DestinationMilvusIndexingAuthenticationNoAuthMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "no_auth": + *e = DestinationMilvusIndexingAuthenticationNoAuthMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusIndexingAuthenticationNoAuthMode: %v", v) + } +} + +// DestinationMilvusIndexingAuthenticationNoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) +type DestinationMilvusIndexingAuthenticationNoAuth struct { + Mode *DestinationMilvusIndexingAuthenticationNoAuthMode `json:"mode,omitempty"` +} + +type DestinationMilvusIndexingAuthenticationUsernamePasswordMode string + +const ( + DestinationMilvusIndexingAuthenticationUsernamePasswordModeUsernamePassword 
DestinationMilvusIndexingAuthenticationUsernamePasswordMode = "username_password" +) + +func (e DestinationMilvusIndexingAuthenticationUsernamePasswordMode) ToPointer() *DestinationMilvusIndexingAuthenticationUsernamePasswordMode { + return &e +} + +func (e *DestinationMilvusIndexingAuthenticationUsernamePasswordMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "username_password": + *e = DestinationMilvusIndexingAuthenticationUsernamePasswordMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusIndexingAuthenticationUsernamePasswordMode: %v", v) + } +} + +// DestinationMilvusIndexingAuthenticationUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters) +type DestinationMilvusIndexingAuthenticationUsernamePassword struct { + Mode *DestinationMilvusIndexingAuthenticationUsernamePasswordMode `json:"mode,omitempty"` + // Password for the Milvus instance + Password string `json:"password"` + // Username for the Milvus instance + Username string `json:"username"` +} + +type DestinationMilvusIndexingAuthenticationAPITokenMode string + +const ( + DestinationMilvusIndexingAuthenticationAPITokenModeToken DestinationMilvusIndexingAuthenticationAPITokenMode = "token" +) + +func (e DestinationMilvusIndexingAuthenticationAPITokenMode) ToPointer() *DestinationMilvusIndexingAuthenticationAPITokenMode { + return &e +} + +func (e *DestinationMilvusIndexingAuthenticationAPITokenMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "token": + *e = DestinationMilvusIndexingAuthenticationAPITokenMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusIndexingAuthenticationAPITokenMode: %v", v) + } +} + +// DestinationMilvusIndexingAuthenticationAPIToken - Authenticate using an API token (suitable for Zilliz Cloud) +type DestinationMilvusIndexingAuthenticationAPIToken struct { + Mode *DestinationMilvusIndexingAuthenticationAPITokenMode `json:"mode,omitempty"` + // API Token for the Milvus instance + Token string `json:"token"` +} + +type DestinationMilvusIndexingAuthenticationType string + +const ( + DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationAPIToken DestinationMilvusIndexingAuthenticationType = "destination-milvus_Indexing_Authentication_API Token" + DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationUsernamePassword DestinationMilvusIndexingAuthenticationType = "destination-milvus_Indexing_Authentication_Username/Password" + DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationNoAuth DestinationMilvusIndexingAuthenticationType = "destination-milvus_Indexing_Authentication_No auth" +) + +type DestinationMilvusIndexingAuthentication struct { + DestinationMilvusIndexingAuthenticationAPIToken *DestinationMilvusIndexingAuthenticationAPIToken + DestinationMilvusIndexingAuthenticationUsernamePassword *DestinationMilvusIndexingAuthenticationUsernamePassword + DestinationMilvusIndexingAuthenticationNoAuth *DestinationMilvusIndexingAuthenticationNoAuth + + Type DestinationMilvusIndexingAuthenticationType +} + +func CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationAPIToken(destinationMilvusIndexingAuthenticationAPIToken DestinationMilvusIndexingAuthenticationAPIToken) 
DestinationMilvusIndexingAuthentication { + typ := DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationAPIToken + + return DestinationMilvusIndexingAuthentication{ + DestinationMilvusIndexingAuthenticationAPIToken: &destinationMilvusIndexingAuthenticationAPIToken, + Type: typ, + } +} + +func CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationUsernamePassword(destinationMilvusIndexingAuthenticationUsernamePassword DestinationMilvusIndexingAuthenticationUsernamePassword) DestinationMilvusIndexingAuthentication { + typ := DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationUsernamePassword + + return DestinationMilvusIndexingAuthentication{ + DestinationMilvusIndexingAuthenticationUsernamePassword: &destinationMilvusIndexingAuthenticationUsernamePassword, + Type: typ, + } +} + +func CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationNoAuth(destinationMilvusIndexingAuthenticationNoAuth DestinationMilvusIndexingAuthenticationNoAuth) DestinationMilvusIndexingAuthentication { + typ := DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationNoAuth + + return DestinationMilvusIndexingAuthentication{ + DestinationMilvusIndexingAuthenticationNoAuth: &destinationMilvusIndexingAuthenticationNoAuth, + Type: typ, + } +} + +func (u *DestinationMilvusIndexingAuthentication) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + destinationMilvusIndexingAuthenticationNoAuth := new(DestinationMilvusIndexingAuthenticationNoAuth) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusIndexingAuthenticationNoAuth); err == nil { + u.DestinationMilvusIndexingAuthenticationNoAuth = destinationMilvusIndexingAuthenticationNoAuth + u.Type = DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationNoAuth + return nil + } + + destinationMilvusIndexingAuthenticationAPIToken := new(DestinationMilvusIndexingAuthenticationAPIToken) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusIndexingAuthenticationAPIToken); err == nil { + u.DestinationMilvusIndexingAuthenticationAPIToken = destinationMilvusIndexingAuthenticationAPIToken + u.Type = DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationAPIToken + return nil + } + + destinationMilvusIndexingAuthenticationUsernamePassword := new(DestinationMilvusIndexingAuthenticationUsernamePassword) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusIndexingAuthenticationUsernamePassword); err == nil { + u.DestinationMilvusIndexingAuthenticationUsernamePassword = destinationMilvusIndexingAuthenticationUsernamePassword + u.Type = DestinationMilvusIndexingAuthenticationTypeDestinationMilvusIndexingAuthenticationUsernamePassword + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u DestinationMilvusIndexingAuthentication) MarshalJSON() ([]byte, error) { + if u.DestinationMilvusIndexingAuthenticationNoAuth != nil { + return json.Marshal(u.DestinationMilvusIndexingAuthenticationNoAuth) + } + + if u.DestinationMilvusIndexingAuthenticationAPIToken != nil { + return json.Marshal(u.DestinationMilvusIndexingAuthenticationAPIToken) + } + + if u.DestinationMilvusIndexingAuthenticationUsernamePassword != nil { + return 
json.Marshal(u.DestinationMilvusIndexingAuthenticationUsernamePassword) + } + + return nil, nil +} + +// DestinationMilvusIndexing - Indexing configuration +type DestinationMilvusIndexing struct { + // Authentication method + Auth DestinationMilvusIndexingAuthentication `json:"auth"` + // The collection to load data into + Collection string `json:"collection"` + // The database to connect to + Db *string `json:"db,omitempty"` + // The public endpoint of the Milvus instance. + Host string `json:"host"` + // The field in the entity that contains the embedded text + TextField *string `json:"text_field,omitempty"` + // The field in the entity that contains the vector + VectorField *string `json:"vector_field,omitempty"` +} + +type DestinationMilvusProcessingConfigModel struct { + // Size of overlap between chunks in tokens to store in vector store to better capture relevant context + ChunkOverlap *int64 `json:"chunk_overlap,omitempty"` + // Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) + ChunkSize int64 `json:"chunk_size"` + // List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. + MetadataFields []string `json:"metadata_fields,omitempty"` + // List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. + TextFields []string `json:"text_fields,omitempty"` +} + +type DestinationMilvus struct { + DestinationType DestinationMilvusMilvus `json:"destinationType"` + // Embedding configuration + Embedding DestinationMilvusEmbedding `json:"embedding"` + // Indexing configuration + Indexing DestinationMilvusIndexing `json:"indexing"` + Processing DestinationMilvusProcessingConfigModel `json:"processing"` +} diff --git a/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go b/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go new file mode 100755 index 000000000..6f1af0cba --- /dev/null +++ b/internal/sdk/pkg/models/shared/destinationmilvuscreaterequest.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
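For orientation, a sketch of how a DestinationMilvus value could be assembled from the constructors and enums introduced above, written as if it lived alongside them in the shared package; the host, token, key and collection values are placeholders and the surrounding provider wiring is omitted.

package shared

func exampleDestinationMilvus() DestinationMilvus {
	chunkOverlap := int64(100)
	return DestinationMilvus{
		DestinationType: DestinationMilvusMilvusMilvus,
		// OpenAI embedding variant; the other variants are built with their
		// matching CreateDestinationMilvusEmbedding... helpers.
		Embedding: CreateDestinationMilvusEmbeddingDestinationMilvusEmbeddingOpenAI(
			DestinationMilvusEmbeddingOpenAI{
				Mode:      DestinationMilvusEmbeddingOpenAIModeOpenai.ToPointer(),
				OpenaiKey: "placeholder-openai-key",
			},
		),
		Indexing: DestinationMilvusIndexing{
			// API-token auth, e.g. for Zilliz Cloud; NoAuth and Username/Password
			// are constructed the same way via their Create... helpers.
			Auth: CreateDestinationMilvusIndexingAuthenticationDestinationMilvusIndexingAuthenticationAPIToken(
				DestinationMilvusIndexingAuthenticationAPIToken{
					Mode:  DestinationMilvusIndexingAuthenticationAPITokenModeToken.ToPointer(),
					Token: "placeholder-zilliz-token",
				},
			),
			Collection: "airbyte_records",
			Host:       "https://example.api.zillizcloud.com",
		},
		Processing: DestinationMilvusProcessingConfigModel{
			ChunkSize:    1000,
			ChunkOverlap: &chunkOverlap,
		},
	}
}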
+ +package shared + +type DestinationMilvusCreateRequest struct { + Configuration DestinationMilvus `json:"configuration"` + Name string `json:"name"` + WorkspaceID string `json:"workspaceId"` +} diff --git a/internal/sdk/pkg/models/shared/sourceopenweatherputrequest.go b/internal/sdk/pkg/models/shared/destinationmilvusputrequest.go similarity index 66% rename from internal/sdk/pkg/models/shared/sourceopenweatherputrequest.go rename to internal/sdk/pkg/models/shared/destinationmilvusputrequest.go index 92e865380..ba3946485 100755 --- a/internal/sdk/pkg/models/shared/sourceopenweatherputrequest.go +++ b/internal/sdk/pkg/models/shared/destinationmilvusputrequest.go @@ -2,8 +2,8 @@ package shared -type SourceOpenweatherPutRequest struct { - Configuration SourceOpenweatherUpdate `json:"configuration"` +type DestinationMilvusPutRequest struct { + Configuration DestinationMilvusUpdate `json:"configuration"` Name string `json:"name"` WorkspaceID string `json:"workspaceId"` } diff --git a/internal/sdk/pkg/models/shared/destinationmilvusupdate.go b/internal/sdk/pkg/models/shared/destinationmilvusupdate.go new file mode 100755 index 000000000..4a20582ee --- /dev/null +++ b/internal/sdk/pkg/models/shared/destinationmilvusupdate.go @@ -0,0 +1,468 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package shared + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" +) + +type DestinationMilvusUpdateEmbeddingFromFieldMode string + +const ( + DestinationMilvusUpdateEmbeddingFromFieldModeFromField DestinationMilvusUpdateEmbeddingFromFieldMode = "from_field" +) + +func (e DestinationMilvusUpdateEmbeddingFromFieldMode) ToPointer() *DestinationMilvusUpdateEmbeddingFromFieldMode { + return &e +} + +func (e *DestinationMilvusUpdateEmbeddingFromFieldMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "from_field": + *e = DestinationMilvusUpdateEmbeddingFromFieldMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingFromFieldMode: %v", v) + } +} + +// DestinationMilvusUpdateEmbeddingFromField - Use a field in the record as the embedding. This is useful if you already have an embedding for your data and want to store it in the vector store. +type DestinationMilvusUpdateEmbeddingFromField struct { + // The number of dimensions the embedding model is generating + Dimensions int64 `json:"dimensions"` + // Name of the field in the record that contains the embedding + FieldName string `json:"field_name"` + Mode *DestinationMilvusUpdateEmbeddingFromFieldMode `json:"mode,omitempty"` +} + +type DestinationMilvusUpdateEmbeddingFakeMode string + +const ( + DestinationMilvusUpdateEmbeddingFakeModeFake DestinationMilvusUpdateEmbeddingFakeMode = "fake" +) + +func (e DestinationMilvusUpdateEmbeddingFakeMode) ToPointer() *DestinationMilvusUpdateEmbeddingFakeMode { + return &e +} + +func (e *DestinationMilvusUpdateEmbeddingFakeMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "fake": + *e = DestinationMilvusUpdateEmbeddingFakeMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingFakeMode: %v", v) + } +} + +// DestinationMilvusUpdateEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. 
+type DestinationMilvusUpdateEmbeddingFake struct { + Mode *DestinationMilvusUpdateEmbeddingFakeMode `json:"mode,omitempty"` +} + +type DestinationMilvusUpdateEmbeddingCohereMode string + +const ( + DestinationMilvusUpdateEmbeddingCohereModeCohere DestinationMilvusUpdateEmbeddingCohereMode = "cohere" +) + +func (e DestinationMilvusUpdateEmbeddingCohereMode) ToPointer() *DestinationMilvusUpdateEmbeddingCohereMode { + return &e +} + +func (e *DestinationMilvusUpdateEmbeddingCohereMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "cohere": + *e = DestinationMilvusUpdateEmbeddingCohereMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingCohereMode: %v", v) + } +} + +// DestinationMilvusUpdateEmbeddingCohere - Use the Cohere API to embed text. +type DestinationMilvusUpdateEmbeddingCohere struct { + CohereKey string `json:"cohere_key"` + Mode *DestinationMilvusUpdateEmbeddingCohereMode `json:"mode,omitempty"` +} + +type DestinationMilvusUpdateEmbeddingOpenAIMode string + +const ( + DestinationMilvusUpdateEmbeddingOpenAIModeOpenai DestinationMilvusUpdateEmbeddingOpenAIMode = "openai" +) + +func (e DestinationMilvusUpdateEmbeddingOpenAIMode) ToPointer() *DestinationMilvusUpdateEmbeddingOpenAIMode { + return &e +} + +func (e *DestinationMilvusUpdateEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "openai": + *e = DestinationMilvusUpdateEmbeddingOpenAIMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateEmbeddingOpenAIMode: %v", v) + } +} + +// DestinationMilvusUpdateEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+type DestinationMilvusUpdateEmbeddingOpenAI struct { + Mode *DestinationMilvusUpdateEmbeddingOpenAIMode `json:"mode,omitempty"` + OpenaiKey string `json:"openai_key"` +} + +type DestinationMilvusUpdateEmbeddingType string + +const ( + DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingOpenAI DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_OpenAI" + DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingCohere DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_Cohere" + DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFake DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_Fake" + DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFromField DestinationMilvusUpdateEmbeddingType = "destination-milvus-update_Embedding_From Field" +) + +type DestinationMilvusUpdateEmbedding struct { + DestinationMilvusUpdateEmbeddingOpenAI *DestinationMilvusUpdateEmbeddingOpenAI + DestinationMilvusUpdateEmbeddingCohere *DestinationMilvusUpdateEmbeddingCohere + DestinationMilvusUpdateEmbeddingFake *DestinationMilvusUpdateEmbeddingFake + DestinationMilvusUpdateEmbeddingFromField *DestinationMilvusUpdateEmbeddingFromField + + Type DestinationMilvusUpdateEmbeddingType +} + +func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingOpenAI(destinationMilvusUpdateEmbeddingOpenAI DestinationMilvusUpdateEmbeddingOpenAI) DestinationMilvusUpdateEmbedding { + typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingOpenAI + + return DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingOpenAI: &destinationMilvusUpdateEmbeddingOpenAI, + Type: typ, + } +} + +func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingCohere(destinationMilvusUpdateEmbeddingCohere DestinationMilvusUpdateEmbeddingCohere) DestinationMilvusUpdateEmbedding { + typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingCohere + + return DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingCohere: &destinationMilvusUpdateEmbeddingCohere, + Type: typ, + } +} + +func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingFake(destinationMilvusUpdateEmbeddingFake DestinationMilvusUpdateEmbeddingFake) DestinationMilvusUpdateEmbedding { + typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFake + + return DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingFake: &destinationMilvusUpdateEmbeddingFake, + Type: typ, + } +} + +func CreateDestinationMilvusUpdateEmbeddingDestinationMilvusUpdateEmbeddingFromField(destinationMilvusUpdateEmbeddingFromField DestinationMilvusUpdateEmbeddingFromField) DestinationMilvusUpdateEmbedding { + typ := DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFromField + + return DestinationMilvusUpdateEmbedding{ + DestinationMilvusUpdateEmbeddingFromField: &destinationMilvusUpdateEmbeddingFromField, + Type: typ, + } +} + +func (u *DestinationMilvusUpdateEmbedding) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + destinationMilvusUpdateEmbeddingFake := new(DestinationMilvusUpdateEmbeddingFake) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateEmbeddingFake); err == nil { + u.DestinationMilvusUpdateEmbeddingFake = destinationMilvusUpdateEmbeddingFake + u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFake + return nil + 
} + + destinationMilvusUpdateEmbeddingOpenAI := new(DestinationMilvusUpdateEmbeddingOpenAI) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateEmbeddingOpenAI); err == nil { + u.DestinationMilvusUpdateEmbeddingOpenAI = destinationMilvusUpdateEmbeddingOpenAI + u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingOpenAI + return nil + } + + destinationMilvusUpdateEmbeddingCohere := new(DestinationMilvusUpdateEmbeddingCohere) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateEmbeddingCohere); err == nil { + u.DestinationMilvusUpdateEmbeddingCohere = destinationMilvusUpdateEmbeddingCohere + u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingCohere + return nil + } + + destinationMilvusUpdateEmbeddingFromField := new(DestinationMilvusUpdateEmbeddingFromField) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateEmbeddingFromField); err == nil { + u.DestinationMilvusUpdateEmbeddingFromField = destinationMilvusUpdateEmbeddingFromField + u.Type = DestinationMilvusUpdateEmbeddingTypeDestinationMilvusUpdateEmbeddingFromField + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u DestinationMilvusUpdateEmbedding) MarshalJSON() ([]byte, error) { + if u.DestinationMilvusUpdateEmbeddingFake != nil { + return json.Marshal(u.DestinationMilvusUpdateEmbeddingFake) + } + + if u.DestinationMilvusUpdateEmbeddingOpenAI != nil { + return json.Marshal(u.DestinationMilvusUpdateEmbeddingOpenAI) + } + + if u.DestinationMilvusUpdateEmbeddingCohere != nil { + return json.Marshal(u.DestinationMilvusUpdateEmbeddingCohere) + } + + if u.DestinationMilvusUpdateEmbeddingFromField != nil { + return json.Marshal(u.DestinationMilvusUpdateEmbeddingFromField) + } + + return nil, nil +} + +type DestinationMilvusUpdateIndexingAuthenticationNoAuthMode string + +const ( + DestinationMilvusUpdateIndexingAuthenticationNoAuthModeNoAuth DestinationMilvusUpdateIndexingAuthenticationNoAuthMode = "no_auth" +) + +func (e DestinationMilvusUpdateIndexingAuthenticationNoAuthMode) ToPointer() *DestinationMilvusUpdateIndexingAuthenticationNoAuthMode { + return &e +} + +func (e *DestinationMilvusUpdateIndexingAuthenticationNoAuthMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "no_auth": + *e = DestinationMilvusUpdateIndexingAuthenticationNoAuthMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateIndexingAuthenticationNoAuthMode: %v", v) + } +} + +// DestinationMilvusUpdateIndexingAuthenticationNoAuth - Do not authenticate (suitable for locally running test clusters, do not use for clusters with public IP addresses) +type DestinationMilvusUpdateIndexingAuthenticationNoAuth struct { + Mode *DestinationMilvusUpdateIndexingAuthenticationNoAuthMode `json:"mode,omitempty"` +} + +type DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode string + +const ( + DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordModeUsernamePassword DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode = "username_password" +) + +func (e DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode) ToPointer() *DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode { + return &e +} + +func (e 
*DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "username_password": + *e = DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode: %v", v) + } +} + +// DestinationMilvusUpdateIndexingAuthenticationUsernamePassword - Authenticate using username and password (suitable for self-managed Milvus clusters) +type DestinationMilvusUpdateIndexingAuthenticationUsernamePassword struct { + Mode *DestinationMilvusUpdateIndexingAuthenticationUsernamePasswordMode `json:"mode,omitempty"` + // Password for the Milvus instance + Password string `json:"password"` + // Username for the Milvus instance + Username string `json:"username"` +} + +type DestinationMilvusUpdateIndexingAuthenticationAPITokenMode string + +const ( + DestinationMilvusUpdateIndexingAuthenticationAPITokenModeToken DestinationMilvusUpdateIndexingAuthenticationAPITokenMode = "token" +) + +func (e DestinationMilvusUpdateIndexingAuthenticationAPITokenMode) ToPointer() *DestinationMilvusUpdateIndexingAuthenticationAPITokenMode { + return &e +} + +func (e *DestinationMilvusUpdateIndexingAuthenticationAPITokenMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "token": + *e = DestinationMilvusUpdateIndexingAuthenticationAPITokenMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationMilvusUpdateIndexingAuthenticationAPITokenMode: %v", v) + } +} + +// DestinationMilvusUpdateIndexingAuthenticationAPIToken - Authenticate using an API token (suitable for Zilliz Cloud) +type DestinationMilvusUpdateIndexingAuthenticationAPIToken struct { + Mode *DestinationMilvusUpdateIndexingAuthenticationAPITokenMode `json:"mode,omitempty"` + // API Token for the Milvus instance + Token string `json:"token"` +} + +type DestinationMilvusUpdateIndexingAuthenticationType string + +const ( + DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationAPIToken DestinationMilvusUpdateIndexingAuthenticationType = "destination-milvus-update_Indexing_Authentication_API Token" + DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationUsernamePassword DestinationMilvusUpdateIndexingAuthenticationType = "destination-milvus-update_Indexing_Authentication_Username/Password" + DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationNoAuth DestinationMilvusUpdateIndexingAuthenticationType = "destination-milvus-update_Indexing_Authentication_No auth" +) + +type DestinationMilvusUpdateIndexingAuthentication struct { + DestinationMilvusUpdateIndexingAuthenticationAPIToken *DestinationMilvusUpdateIndexingAuthenticationAPIToken + DestinationMilvusUpdateIndexingAuthenticationUsernamePassword *DestinationMilvusUpdateIndexingAuthenticationUsernamePassword + DestinationMilvusUpdateIndexingAuthenticationNoAuth *DestinationMilvusUpdateIndexingAuthenticationNoAuth + + Type DestinationMilvusUpdateIndexingAuthenticationType +} + +func CreateDestinationMilvusUpdateIndexingAuthenticationDestinationMilvusUpdateIndexingAuthenticationAPIToken(destinationMilvusUpdateIndexingAuthenticationAPIToken DestinationMilvusUpdateIndexingAuthenticationAPIToken) 
DestinationMilvusUpdateIndexingAuthentication { + typ := DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationAPIToken + + return DestinationMilvusUpdateIndexingAuthentication{ + DestinationMilvusUpdateIndexingAuthenticationAPIToken: &destinationMilvusUpdateIndexingAuthenticationAPIToken, + Type: typ, + } +} + +func CreateDestinationMilvusUpdateIndexingAuthenticationDestinationMilvusUpdateIndexingAuthenticationUsernamePassword(destinationMilvusUpdateIndexingAuthenticationUsernamePassword DestinationMilvusUpdateIndexingAuthenticationUsernamePassword) DestinationMilvusUpdateIndexingAuthentication { + typ := DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationUsernamePassword + + return DestinationMilvusUpdateIndexingAuthentication{ + DestinationMilvusUpdateIndexingAuthenticationUsernamePassword: &destinationMilvusUpdateIndexingAuthenticationUsernamePassword, + Type: typ, + } +} + +func CreateDestinationMilvusUpdateIndexingAuthenticationDestinationMilvusUpdateIndexingAuthenticationNoAuth(destinationMilvusUpdateIndexingAuthenticationNoAuth DestinationMilvusUpdateIndexingAuthenticationNoAuth) DestinationMilvusUpdateIndexingAuthentication { + typ := DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationNoAuth + + return DestinationMilvusUpdateIndexingAuthentication{ + DestinationMilvusUpdateIndexingAuthenticationNoAuth: &destinationMilvusUpdateIndexingAuthenticationNoAuth, + Type: typ, + } +} + +func (u *DestinationMilvusUpdateIndexingAuthentication) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + destinationMilvusUpdateIndexingAuthenticationNoAuth := new(DestinationMilvusUpdateIndexingAuthenticationNoAuth) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateIndexingAuthenticationNoAuth); err == nil { + u.DestinationMilvusUpdateIndexingAuthenticationNoAuth = destinationMilvusUpdateIndexingAuthenticationNoAuth + u.Type = DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationNoAuth + return nil + } + + destinationMilvusUpdateIndexingAuthenticationAPIToken := new(DestinationMilvusUpdateIndexingAuthenticationAPIToken) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateIndexingAuthenticationAPIToken); err == nil { + u.DestinationMilvusUpdateIndexingAuthenticationAPIToken = destinationMilvusUpdateIndexingAuthenticationAPIToken + u.Type = DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationAPIToken + return nil + } + + destinationMilvusUpdateIndexingAuthenticationUsernamePassword := new(DestinationMilvusUpdateIndexingAuthenticationUsernamePassword) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMilvusUpdateIndexingAuthenticationUsernamePassword); err == nil { + u.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword = destinationMilvusUpdateIndexingAuthenticationUsernamePassword + u.Type = DestinationMilvusUpdateIndexingAuthenticationTypeDestinationMilvusUpdateIndexingAuthenticationUsernamePassword + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u DestinationMilvusUpdateIndexingAuthentication) MarshalJSON() ([]byte, error) { + if u.DestinationMilvusUpdateIndexingAuthenticationNoAuth != nil { + return 
json.Marshal(u.DestinationMilvusUpdateIndexingAuthenticationNoAuth) + } + + if u.DestinationMilvusUpdateIndexingAuthenticationAPIToken != nil { + return json.Marshal(u.DestinationMilvusUpdateIndexingAuthenticationAPIToken) + } + + if u.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword != nil { + return json.Marshal(u.DestinationMilvusUpdateIndexingAuthenticationUsernamePassword) + } + + return nil, nil +} + +// DestinationMilvusUpdateIndexing - Indexing configuration +type DestinationMilvusUpdateIndexing struct { + // Authentication method + Auth DestinationMilvusUpdateIndexingAuthentication `json:"auth"` + // The collection to load data into + Collection string `json:"collection"` + // The database to connect to + Db *string `json:"db,omitempty"` + // The public endpoint of the Milvus instance. + Host string `json:"host"` + // The field in the entity that contains the embedded text + TextField *string `json:"text_field,omitempty"` + // The field in the entity that contains the vector + VectorField *string `json:"vector_field,omitempty"` +} + +type DestinationMilvusUpdateProcessingConfigModel struct { + // Size of overlap between chunks in tokens to store in vector store to better capture relevant context + ChunkOverlap *int64 `json:"chunk_overlap,omitempty"` + // Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) + ChunkSize int64 `json:"chunk_size"` + // List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. + MetadataFields []string `json:"metadata_fields,omitempty"` + // List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. 
+ TextFields []string `json:"text_fields,omitempty"` +} + +type DestinationMilvusUpdate struct { + // Embedding configuration + Embedding DestinationMilvusUpdateEmbedding `json:"embedding"` + // Indexing configuration + Indexing DestinationMilvusUpdateIndexing `json:"indexing"` + Processing DestinationMilvusUpdateProcessingConfigModel `json:"processing"` +} diff --git a/internal/sdk/pkg/models/shared/destinationmongodb.go b/internal/sdk/pkg/models/shared/destinationmongodb.go index 57bc7b4cf..b3edae1a1 100755 --- a/internal/sdk/pkg/models/shared/destinationmongodb.go +++ b/internal/sdk/pkg/models/shared/destinationmongodb.go @@ -306,6 +306,15 @@ func CreateDestinationMongodbMongoDbInstanceTypeDestinationMongodbMongoDBInstanc func (u *DestinationMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error { var d *json.Decoder + destinationMongodbMongoDBInstanceTypeMongoDBAtlas := new(DestinationMongodbMongoDBInstanceTypeMongoDBAtlas) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMongodbMongoDBInstanceTypeMongoDBAtlas); err == nil { + u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas = destinationMongodbMongoDBInstanceTypeMongoDBAtlas + u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBInstanceTypeMongoDBAtlas + return nil + } + destinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance := new(DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() @@ -324,19 +333,14 @@ func (u *DestinationMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error return nil } - destinationMongodbMongoDBInstanceTypeMongoDBAtlas := new(DestinationMongodbMongoDBInstanceTypeMongoDBAtlas) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationMongodbMongoDBInstanceTypeMongoDBAtlas); err == nil { - u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas = destinationMongodbMongoDBInstanceTypeMongoDBAtlas - u.Type = DestinationMongodbMongoDbInstanceTypeTypeDestinationMongodbMongoDBInstanceTypeMongoDBAtlas - return nil - } - return errors.New("could not unmarshal into supported union types") } func (u DestinationMongodbMongoDbInstanceType) MarshalJSON() ([]byte, error) { + if u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas != nil { + return json.Marshal(u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas) + } + if u.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil { return json.Marshal(u.DestinationMongodbMongoDbInstanceTypeStandaloneMongoDbInstance) } @@ -345,10 +349,6 @@ func (u DestinationMongodbMongoDbInstanceType) MarshalJSON() ([]byte, error) { return json.Marshal(u.DestinationMongodbMongoDbInstanceTypeReplicaSet) } - if u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas != nil { - return json.Marshal(u.DestinationMongodbMongoDBInstanceTypeMongoDBAtlas) - } - return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationmongodbupdate.go b/internal/sdk/pkg/models/shared/destinationmongodbupdate.go index 15723eb00..acd08fa23 100755 --- a/internal/sdk/pkg/models/shared/destinationmongodbupdate.go +++ b/internal/sdk/pkg/models/shared/destinationmongodbupdate.go @@ -282,6 +282,15 @@ func CreateDestinationMongodbUpdateMongoDbInstanceTypeDestinationMongodbUpdateMo func (u *DestinationMongodbUpdateMongoDbInstanceType) UnmarshalJSON(data []byte) error { var d *json.Decoder + destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas := 
new(DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas); err == nil { + u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas + u.Type = DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas + return nil + } + destinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance := new(DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() @@ -300,19 +309,14 @@ func (u *DestinationMongodbUpdateMongoDbInstanceType) UnmarshalJSON(data []byte) return nil } - destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas := new(DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas); err == nil { - u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = destinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas - u.Type = DestinationMongodbUpdateMongoDbInstanceTypeTypeDestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas - return nil - } - return errors.New("could not unmarshal into supported union types") } func (u DestinationMongodbUpdateMongoDbInstanceType) MarshalJSON() ([]byte, error) { + if u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil { + return json.Marshal(u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) + } + if u.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil { return json.Marshal(u.DestinationMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance) } @@ -321,10 +325,6 @@ func (u DestinationMongodbUpdateMongoDbInstanceType) MarshalJSON() ([]byte, erro return json.Marshal(u.DestinationMongodbUpdateMongoDbInstanceTypeReplicaSet) } - if u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil { - return json.Marshal(u.DestinationMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) - } - return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationpinecone.go b/internal/sdk/pkg/models/shared/destinationpinecone.go new file mode 100755 index 000000000..81f24ac74 --- /dev/null +++ b/internal/sdk/pkg/models/shared/destinationpinecone.go @@ -0,0 +1,244 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. 
+ +package shared + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" +) + +type DestinationPineconePinecone string + +const ( + DestinationPineconePineconePinecone DestinationPineconePinecone = "pinecone" +) + +func (e DestinationPineconePinecone) ToPointer() *DestinationPineconePinecone { + return &e +} + +func (e *DestinationPineconePinecone) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "pinecone": + *e = DestinationPineconePinecone(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconePinecone: %v", v) + } +} + +type DestinationPineconeEmbeddingFakeMode string + +const ( + DestinationPineconeEmbeddingFakeModeFake DestinationPineconeEmbeddingFakeMode = "fake" +) + +func (e DestinationPineconeEmbeddingFakeMode) ToPointer() *DestinationPineconeEmbeddingFakeMode { + return &e +} + +func (e *DestinationPineconeEmbeddingFakeMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "fake": + *e = DestinationPineconeEmbeddingFakeMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconeEmbeddingFakeMode: %v", v) + } +} + +// DestinationPineconeEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. +type DestinationPineconeEmbeddingFake struct { + Mode *DestinationPineconeEmbeddingFakeMode `json:"mode,omitempty"` +} + +type DestinationPineconeEmbeddingCohereMode string + +const ( + DestinationPineconeEmbeddingCohereModeCohere DestinationPineconeEmbeddingCohereMode = "cohere" +) + +func (e DestinationPineconeEmbeddingCohereMode) ToPointer() *DestinationPineconeEmbeddingCohereMode { + return &e +} + +func (e *DestinationPineconeEmbeddingCohereMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "cohere": + *e = DestinationPineconeEmbeddingCohereMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconeEmbeddingCohereMode: %v", v) + } +} + +// DestinationPineconeEmbeddingCohere - Use the Cohere API to embed text. +type DestinationPineconeEmbeddingCohere struct { + CohereKey string `json:"cohere_key"` + Mode *DestinationPineconeEmbeddingCohereMode `json:"mode,omitempty"` +} + +type DestinationPineconeEmbeddingOpenAIMode string + +const ( + DestinationPineconeEmbeddingOpenAIModeOpenai DestinationPineconeEmbeddingOpenAIMode = "openai" +) + +func (e DestinationPineconeEmbeddingOpenAIMode) ToPointer() *DestinationPineconeEmbeddingOpenAIMode { + return &e +} + +func (e *DestinationPineconeEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "openai": + *e = DestinationPineconeEmbeddingOpenAIMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconeEmbeddingOpenAIMode: %v", v) + } +} + +// DestinationPineconeEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+type DestinationPineconeEmbeddingOpenAI struct { + Mode *DestinationPineconeEmbeddingOpenAIMode `json:"mode,omitempty"` + OpenaiKey string `json:"openai_key"` +} + +type DestinationPineconeEmbeddingType string + +const ( + DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingOpenAI DestinationPineconeEmbeddingType = "destination-pinecone_Embedding_OpenAI" + DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingCohere DestinationPineconeEmbeddingType = "destination-pinecone_Embedding_Cohere" + DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingFake DestinationPineconeEmbeddingType = "destination-pinecone_Embedding_Fake" +) + +type DestinationPineconeEmbedding struct { + DestinationPineconeEmbeddingOpenAI *DestinationPineconeEmbeddingOpenAI + DestinationPineconeEmbeddingCohere *DestinationPineconeEmbeddingCohere + DestinationPineconeEmbeddingFake *DestinationPineconeEmbeddingFake + + Type DestinationPineconeEmbeddingType +} + +func CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingOpenAI(destinationPineconeEmbeddingOpenAI DestinationPineconeEmbeddingOpenAI) DestinationPineconeEmbedding { + typ := DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingOpenAI + + return DestinationPineconeEmbedding{ + DestinationPineconeEmbeddingOpenAI: &destinationPineconeEmbeddingOpenAI, + Type: typ, + } +} + +func CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingCohere(destinationPineconeEmbeddingCohere DestinationPineconeEmbeddingCohere) DestinationPineconeEmbedding { + typ := DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingCohere + + return DestinationPineconeEmbedding{ + DestinationPineconeEmbeddingCohere: &destinationPineconeEmbeddingCohere, + Type: typ, + } +} + +func CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingFake(destinationPineconeEmbeddingFake DestinationPineconeEmbeddingFake) DestinationPineconeEmbedding { + typ := DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingFake + + return DestinationPineconeEmbedding{ + DestinationPineconeEmbeddingFake: &destinationPineconeEmbeddingFake, + Type: typ, + } +} + +func (u *DestinationPineconeEmbedding) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + destinationPineconeEmbeddingFake := new(DestinationPineconeEmbeddingFake) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationPineconeEmbeddingFake); err == nil { + u.DestinationPineconeEmbeddingFake = destinationPineconeEmbeddingFake + u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingFake + return nil + } + + destinationPineconeEmbeddingOpenAI := new(DestinationPineconeEmbeddingOpenAI) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationPineconeEmbeddingOpenAI); err == nil { + u.DestinationPineconeEmbeddingOpenAI = destinationPineconeEmbeddingOpenAI + u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingOpenAI + return nil + } + + destinationPineconeEmbeddingCohere := new(DestinationPineconeEmbeddingCohere) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationPineconeEmbeddingCohere); err == nil { + u.DestinationPineconeEmbeddingCohere = destinationPineconeEmbeddingCohere + u.Type = DestinationPineconeEmbeddingTypeDestinationPineconeEmbeddingCohere + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u DestinationPineconeEmbedding) MarshalJSON() ([]byte, error) { + if 
u.DestinationPineconeEmbeddingFake != nil { + return json.Marshal(u.DestinationPineconeEmbeddingFake) + } + + if u.DestinationPineconeEmbeddingOpenAI != nil { + return json.Marshal(u.DestinationPineconeEmbeddingOpenAI) + } + + if u.DestinationPineconeEmbeddingCohere != nil { + return json.Marshal(u.DestinationPineconeEmbeddingCohere) + } + + return nil, nil +} + +// DestinationPineconeIndexing - Pinecone is a popular vector store that can be used to store and retrieve embeddings. +type DestinationPineconeIndexing struct { + // Pinecone index to use + Index string `json:"index"` + // Pinecone environment to use + PineconeEnvironment string `json:"pinecone_environment"` + PineconeKey string `json:"pinecone_key"` +} + +type DestinationPineconeProcessingConfigModel struct { + // Size of overlap between chunks in tokens to store in vector store to better capture relevant context + ChunkOverlap *int64 `json:"chunk_overlap,omitempty"` + // Size of chunks in tokens to store in vector store (make sure it is not too big for the context if your LLM) + ChunkSize int64 `json:"chunk_size"` + // List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array set to a field named by the path. + MetadataFields []string `json:"metadata_fields,omitempty"` + // List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `names` fields in all entries of the `users` array. + TextFields []string `json:"text_fields,omitempty"` +} + +type DestinationPinecone struct { + DestinationType DestinationPineconePinecone `json:"destinationType"` + // Embedding configuration + Embedding DestinationPineconeEmbedding `json:"embedding"` + // Pinecone is a popular vector store that can be used to store and retrieve embeddings. 
+ Indexing DestinationPineconeIndexing `json:"indexing"` + Processing DestinationPineconeProcessingConfigModel `json:"processing"` +} diff --git a/internal/sdk/pkg/models/shared/sourcedatadogputrequest.go b/internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go similarity index 65% rename from internal/sdk/pkg/models/shared/sourcedatadogputrequest.go rename to internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go index ddc02b5f0..47c7380cf 100755 --- a/internal/sdk/pkg/models/shared/sourcedatadogputrequest.go +++ b/internal/sdk/pkg/models/shared/destinationpineconecreaterequest.go @@ -2,8 +2,8 @@ package shared -type SourceDatadogPutRequest struct { - Configuration SourceDatadogUpdate `json:"configuration"` +type DestinationPineconeCreateRequest struct { + Configuration DestinationPinecone `json:"configuration"` Name string `json:"name"` WorkspaceID string `json:"workspaceId"` } diff --git a/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go b/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go new file mode 100755 index 000000000..d59659835 --- /dev/null +++ b/internal/sdk/pkg/models/shared/destinationpineconeputrequest.go @@ -0,0 +1,9 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package shared + +type DestinationPineconePutRequest struct { + Configuration DestinationPineconeUpdate `json:"configuration"` + Name string `json:"name"` + WorkspaceID string `json:"workspaceId"` +} diff --git a/internal/sdk/pkg/models/shared/destinationpineconeupdate.go b/internal/sdk/pkg/models/shared/destinationpineconeupdate.go new file mode 100755 index 000000000..813b50eb6 --- /dev/null +++ b/internal/sdk/pkg/models/shared/destinationpineconeupdate.go @@ -0,0 +1,219 @@ +// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. + +package shared + +import ( + "bytes" + "encoding/json" + "errors" + "fmt" +) + +type DestinationPineconeUpdateEmbeddingFakeMode string + +const ( + DestinationPineconeUpdateEmbeddingFakeModeFake DestinationPineconeUpdateEmbeddingFakeMode = "fake" +) + +func (e DestinationPineconeUpdateEmbeddingFakeMode) ToPointer() *DestinationPineconeUpdateEmbeddingFakeMode { + return &e +} + +func (e *DestinationPineconeUpdateEmbeddingFakeMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "fake": + *e = DestinationPineconeUpdateEmbeddingFakeMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconeUpdateEmbeddingFakeMode: %v", v) + } +} + +// DestinationPineconeUpdateEmbeddingFake - Use a fake embedding made out of random vectors with 1536 embedding dimensions. This is useful for testing the data pipeline without incurring any costs. 
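For orientation, here is a minimal usage sketch (not part of the patch) of the create-side model defined above. It assumes the surrounding `shared` package context; the destinationType string, API keys, index name, and workspace ID are placeholder values chosen for illustration.

// Sketch: assembling a DestinationPineconeCreateRequest from the generated types above.
func exampleDestinationPineconeCreateRequest() DestinationPineconeCreateRequest {
	cfg := DestinationPinecone{
		// The generated destinationType constant is declared earlier in this file;
		// the raw string "pinecone" is assumed here for illustration.
		DestinationType: DestinationPineconePinecone("pinecone"),
		// The Create* helper populates both the variant pointer and the union Type tag.
		Embedding: CreateDestinationPineconeEmbeddingDestinationPineconeEmbeddingOpenAI(
			DestinationPineconeEmbeddingOpenAI{OpenaiKey: "sk-..."},
		),
		Indexing: DestinationPineconeIndexing{
			Index:               "airbyte-demo",
			PineconeEnvironment: "us-west1-gcp",
			PineconeKey:         "pc-...",
		},
		Processing: DestinationPineconeProcessingConfigModel{
			ChunkSize: 1000,
			// Dot notation and wildcards, as described in the field comments above.
			TextFields: []string{"title", "user.name", "users.*.name"},
		},
	}
	return DestinationPineconeCreateRequest{
		Configuration: cfg,
		Name:          "pinecone-destination",
		WorkspaceID:   "00000000-0000-0000-0000-000000000000",
	}
}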
+type DestinationPineconeUpdateEmbeddingFake struct { + Mode *DestinationPineconeUpdateEmbeddingFakeMode `json:"mode,omitempty"` +} + +type DestinationPineconeUpdateEmbeddingCohereMode string + +const ( + DestinationPineconeUpdateEmbeddingCohereModeCohere DestinationPineconeUpdateEmbeddingCohereMode = "cohere" +) + +func (e DestinationPineconeUpdateEmbeddingCohereMode) ToPointer() *DestinationPineconeUpdateEmbeddingCohereMode { + return &e +} + +func (e *DestinationPineconeUpdateEmbeddingCohereMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "cohere": + *e = DestinationPineconeUpdateEmbeddingCohereMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconeUpdateEmbeddingCohereMode: %v", v) + } +} + +// DestinationPineconeUpdateEmbeddingCohere - Use the Cohere API to embed text. +type DestinationPineconeUpdateEmbeddingCohere struct { + CohereKey string `json:"cohere_key"` + Mode *DestinationPineconeUpdateEmbeddingCohereMode `json:"mode,omitempty"` +} + +type DestinationPineconeUpdateEmbeddingOpenAIMode string + +const ( + DestinationPineconeUpdateEmbeddingOpenAIModeOpenai DestinationPineconeUpdateEmbeddingOpenAIMode = "openai" +) + +func (e DestinationPineconeUpdateEmbeddingOpenAIMode) ToPointer() *DestinationPineconeUpdateEmbeddingOpenAIMode { + return &e +} + +func (e *DestinationPineconeUpdateEmbeddingOpenAIMode) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "openai": + *e = DestinationPineconeUpdateEmbeddingOpenAIMode(v) + return nil + default: + return fmt.Errorf("invalid value for DestinationPineconeUpdateEmbeddingOpenAIMode: %v", v) + } +} + +// DestinationPineconeUpdateEmbeddingOpenAI - Use the OpenAI API to embed text. This option is using the text-embedding-ada-002 model with 1536 embedding dimensions. 
+type DestinationPineconeUpdateEmbeddingOpenAI struct { + Mode *DestinationPineconeUpdateEmbeddingOpenAIMode `json:"mode,omitempty"` + OpenaiKey string `json:"openai_key"` +} + +type DestinationPineconeUpdateEmbeddingType string + +const ( + DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingOpenAI DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_Embedding_OpenAI" + DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingCohere DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_Embedding_Cohere" + DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingFake DestinationPineconeUpdateEmbeddingType = "destination-pinecone-update_Embedding_Fake" +) + +type DestinationPineconeUpdateEmbedding struct { + DestinationPineconeUpdateEmbeddingOpenAI *DestinationPineconeUpdateEmbeddingOpenAI + DestinationPineconeUpdateEmbeddingCohere *DestinationPineconeUpdateEmbeddingCohere + DestinationPineconeUpdateEmbeddingFake *DestinationPineconeUpdateEmbeddingFake + + Type DestinationPineconeUpdateEmbeddingType +} + +func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateEmbeddingOpenAI(destinationPineconeUpdateEmbeddingOpenAI DestinationPineconeUpdateEmbeddingOpenAI) DestinationPineconeUpdateEmbedding { + typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingOpenAI + + return DestinationPineconeUpdateEmbedding{ + DestinationPineconeUpdateEmbeddingOpenAI: &destinationPineconeUpdateEmbeddingOpenAI, + Type: typ, + } +} + +func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateEmbeddingCohere(destinationPineconeUpdateEmbeddingCohere DestinationPineconeUpdateEmbeddingCohere) DestinationPineconeUpdateEmbedding { + typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingCohere + + return DestinationPineconeUpdateEmbedding{ + DestinationPineconeUpdateEmbeddingCohere: &destinationPineconeUpdateEmbeddingCohere, + Type: typ, + } +} + +func CreateDestinationPineconeUpdateEmbeddingDestinationPineconeUpdateEmbeddingFake(destinationPineconeUpdateEmbeddingFake DestinationPineconeUpdateEmbeddingFake) DestinationPineconeUpdateEmbedding { + typ := DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingFake + + return DestinationPineconeUpdateEmbedding{ + DestinationPineconeUpdateEmbeddingFake: &destinationPineconeUpdateEmbeddingFake, + Type: typ, + } +} + +func (u *DestinationPineconeUpdateEmbedding) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + destinationPineconeUpdateEmbeddingFake := new(DestinationPineconeUpdateEmbeddingFake) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationPineconeUpdateEmbeddingFake); err == nil { + u.DestinationPineconeUpdateEmbeddingFake = destinationPineconeUpdateEmbeddingFake + u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingFake + return nil + } + + destinationPineconeUpdateEmbeddingOpenAI := new(DestinationPineconeUpdateEmbeddingOpenAI) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationPineconeUpdateEmbeddingOpenAI); err == nil { + u.DestinationPineconeUpdateEmbeddingOpenAI = destinationPineconeUpdateEmbeddingOpenAI + u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingOpenAI + return nil + } + + destinationPineconeUpdateEmbeddingCohere := new(DestinationPineconeUpdateEmbeddingCohere) + d = json.NewDecoder(bytes.NewReader(data)) + 
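// Each variant of this union is trial-decoded with DisallowUnknownFields, so a
// candidate only matches when the payload contains no fields beyond that
// candidate's struct; the first clean decode sets both the variant pointer and
// the Type tag, which is why the order of these attempts determines the result
// for payloads that could satisfy more than one variant.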
d.DisallowUnknownFields() + if err := d.Decode(&destinationPineconeUpdateEmbeddingCohere); err == nil { + u.DestinationPineconeUpdateEmbeddingCohere = destinationPineconeUpdateEmbeddingCohere + u.Type = DestinationPineconeUpdateEmbeddingTypeDestinationPineconeUpdateEmbeddingCohere + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u DestinationPineconeUpdateEmbedding) MarshalJSON() ([]byte, error) { + if u.DestinationPineconeUpdateEmbeddingFake != nil { + return json.Marshal(u.DestinationPineconeUpdateEmbeddingFake) + } + + if u.DestinationPineconeUpdateEmbeddingOpenAI != nil { + return json.Marshal(u.DestinationPineconeUpdateEmbeddingOpenAI) + } + + if u.DestinationPineconeUpdateEmbeddingCohere != nil { + return json.Marshal(u.DestinationPineconeUpdateEmbeddingCohere) + } + + return nil, nil +} + +// DestinationPineconeUpdateIndexing - Pinecone is a popular vector store that can be used to store and retrieve embeddings. +type DestinationPineconeUpdateIndexing struct { + // Pinecone index to use + Index string `json:"index"` + // Pinecone environment to use + PineconeEnvironment string `json:"pinecone_environment"` + PineconeKey string `json:"pinecone_key"` +} + +type DestinationPineconeUpdateProcessingConfigModel struct { + // Size of overlap between chunks in tokens to store in vector store to better capture relevant context + ChunkOverlap *int64 `json:"chunk_overlap,omitempty"` + // Size of chunks in tokens to store in vector store (make sure it is not too big for the context window of your LLM) + ChunkSize int64 `json:"chunk_size"` + // List of fields in the record that should be stored as metadata. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered metadata fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. When specifying nested paths, all matching values are flattened into an array and assigned to a field named by the path. + MetadataFields []string `json:"metadata_fields,omitempty"` + // List of fields in the record that should be used to calculate the embedding. The field list is applied to all streams in the same way and non-existing fields are ignored. If none are defined, all fields are considered text fields. When specifying text fields, you can access nested fields in the record by using dot notation, e.g. `user.name` will access the `name` field in the `user` object. It's also possible to use wildcards to access all fields in an object, e.g. `users.*.name` will access all `name` fields in all entries of the `users` array. + TextFields []string `json:"text_fields,omitempty"` +} + +type DestinationPineconeUpdate struct { + // Embedding configuration + Embedding DestinationPineconeUpdateEmbedding `json:"embedding"` + // Pinecone is a popular vector store that can be used to store and retrieve embeddings. 
+ Indexing DestinationPineconeUpdateIndexing `json:"indexing"` + Processing DestinationPineconeUpdateProcessingConfigModel `json:"processing"` +} diff --git a/internal/sdk/pkg/models/shared/destinations3.go b/internal/sdk/pkg/models/shared/destinations3.go index 766426771..bb6abc638 100755 --- a/internal/sdk/pkg/models/shared/destinations3.go +++ b/internal/sdk/pkg/models/shared/destinations3.go @@ -761,21 +761,30 @@ func (u *DestinationS3OutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON( return nil } - destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate) + destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { - u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate = destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate - u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate + if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { + u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 + u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 return nil } - destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2) + destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { - u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 - u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 + if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { + u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy = destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy + u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy + return nil + } + + destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { + u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate = destinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate + u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate return nil } @@ -797,15 +806,6 @@ func (u *DestinationS3OutputFormatAvroApacheAvroCompressionCodec) UnmarshalJSON( return nil } - destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy) - d = json.NewDecoder(bytes.NewReader(data)) - 
d.DisallowUnknownFields() - if err := d.Decode(&destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { - u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy = destinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy - u.Type = DestinationS3OutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy - return nil - } - return errors.New("could not unmarshal into supported union types") } @@ -814,14 +814,18 @@ func (u DestinationS3OutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() ( return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecNoCompression) } - if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate != nil { - return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate) - } - if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2 != nil { return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecBzip2) } + if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy != nil { + return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy) + } + + if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate != nil { + return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecDeflate) + } + if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz != nil { return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecXz) } @@ -830,10 +834,6 @@ func (u DestinationS3OutputFormatAvroApacheAvroCompressionCodec) MarshalJSON() ( return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecZstandard) } - if u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy != nil { - return json.Marshal(u.DestinationS3OutputFormatAvroApacheAvroCompressionCodecSnappy) - } - return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinations3update.go b/internal/sdk/pkg/models/shared/destinations3update.go index 67ac0ab5c..62ebb26be 100755 --- a/internal/sdk/pkg/models/shared/destinations3update.go +++ b/internal/sdk/pkg/models/shared/destinations3update.go @@ -737,21 +737,30 @@ func (u *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec) Unmarsha return nil } - destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) + destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { - u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate - u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate + if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { + u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 + u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 return nil } - destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 := 
new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) + destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2); err == nil { - u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 - u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 + if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { + u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy + u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy + return nil + } + + destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate); err == nil { + u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate + u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate return nil } @@ -773,15 +782,6 @@ func (u *DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec) Unmarsha return nil } - destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy := new(DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy); err == nil { - u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy = destinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy - u.Type = DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecTypeDestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy - return nil - } - return errors.New("could not unmarshal into supported union types") } @@ -790,14 +790,18 @@ func (u DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec) MarshalJS return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecNoCompression) } - if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil { - return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) - } - if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2 != nil { return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecBzip2) } + if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil { + return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) + } + + if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate != nil { + return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecDeflate) + } + if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz != nil { return 
json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecXz) } @@ -806,10 +810,6 @@ func (u DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodec) MarshalJS return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecZstandard) } - if u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy != nil { - return json.Marshal(u.DestinationS3UpdateOutputFormatAvroApacheAvroCompressionCodecSnappy) - } - return nil, nil } diff --git a/internal/sdk/pkg/models/shared/destinationsnowflake.go b/internal/sdk/pkg/models/shared/destinationsnowflake.go index f9f69515a..8af8ad074 100755 --- a/internal/sdk/pkg/models/shared/destinationsnowflake.go +++ b/internal/sdk/pkg/models/shared/destinationsnowflake.go @@ -153,12 +153,12 @@ func CreateDestinationSnowflakeAuthorizationMethodDestinationSnowflakeAuthorizat func (u *DestinationSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationSnowflakeAuthorizationMethodOAuth20 := new(DestinationSnowflakeAuthorizationMethodOAuth20) + destinationSnowflakeAuthorizationMethodUsernameAndPassword := new(DestinationSnowflakeAuthorizationMethodUsernameAndPassword) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationSnowflakeAuthorizationMethodOAuth20); err == nil { - u.DestinationSnowflakeAuthorizationMethodOAuth20 = destinationSnowflakeAuthorizationMethodOAuth20 - u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodOAuth20 + if err := d.Decode(&destinationSnowflakeAuthorizationMethodUsernameAndPassword); err == nil { + u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword = destinationSnowflakeAuthorizationMethodUsernameAndPassword + u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodUsernameAndPassword return nil } @@ -171,12 +171,12 @@ func (u *DestinationSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) err return nil } - destinationSnowflakeAuthorizationMethodUsernameAndPassword := new(DestinationSnowflakeAuthorizationMethodUsernameAndPassword) + destinationSnowflakeAuthorizationMethodOAuth20 := new(DestinationSnowflakeAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationSnowflakeAuthorizationMethodUsernameAndPassword); err == nil { - u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword = destinationSnowflakeAuthorizationMethodUsernameAndPassword - u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodUsernameAndPassword + if err := d.Decode(&destinationSnowflakeAuthorizationMethodOAuth20); err == nil { + u.DestinationSnowflakeAuthorizationMethodOAuth20 = destinationSnowflakeAuthorizationMethodOAuth20 + u.Type = DestinationSnowflakeAuthorizationMethodTypeDestinationSnowflakeAuthorizationMethodOAuth20 return nil } @@ -184,16 +184,16 @@ func (u *DestinationSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) err } func (u DestinationSnowflakeAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.DestinationSnowflakeAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.DestinationSnowflakeAuthorizationMethodOAuth20) + if u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword != nil { + return json.Marshal(u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword) } if u.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication != nil { return 
json.Marshal(u.DestinationSnowflakeAuthorizationMethodKeyPairAuthentication) } - if u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword != nil { - return json.Marshal(u.DestinationSnowflakeAuthorizationMethodUsernameAndPassword) + if u.DestinationSnowflakeAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.DestinationSnowflakeAuthorizationMethodOAuth20) } return nil, nil @@ -232,14 +232,12 @@ type DestinationSnowflake struct { Host string `json:"host"` // Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 JdbcURLParams *string `json:"jdbc_url_params,omitempty"` - // (Beta) The schema to write raw tables into + // The schema to write raw tables into RawDataSchema *string `json:"raw_data_schema,omitempty"` // Enter the role that you want to use to access Snowflake Role string `json:"role"` // Enter the name of the default schema Schema string `json:"schema"` - // (Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program. - Use1s1tFormat *bool `json:"use_1s1t_format,omitempty"` // Enter the name of the user you want to use to access the database Username string `json:"username"` // Enter the name of the warehouse that you want to sync data into diff --git a/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go b/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go index 892998d82..d5f7e7d2e 100755 --- a/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go +++ b/internal/sdk/pkg/models/shared/destinationsnowflakeupdate.go @@ -153,12 +153,12 @@ func CreateDestinationSnowflakeUpdateAuthorizationMethodDestinationSnowflakeUpda func (u *DestinationSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - destinationSnowflakeUpdateAuthorizationMethodOAuth20 := new(DestinationSnowflakeUpdateAuthorizationMethodOAuth20) + destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword := new(DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodOAuth20); err == nil { - u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 = destinationSnowflakeUpdateAuthorizationMethodOAuth20 - u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword); err == nil { + u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword = destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword + u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword return nil } @@ -171,12 +171,12 @@ func (u *DestinationSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byt return nil } - destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword := new(DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword) + destinationSnowflakeUpdateAuthorizationMethodOAuth20 := new(DestinationSnowflakeUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword); err == nil { - u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword = 
destinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword - u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword + if err := d.Decode(&destinationSnowflakeUpdateAuthorizationMethodOAuth20); err == nil { + u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 = destinationSnowflakeUpdateAuthorizationMethodOAuth20 + u.Type = DestinationSnowflakeUpdateAuthorizationMethodTypeDestinationSnowflakeUpdateAuthorizationMethodOAuth20 return nil } @@ -184,16 +184,16 @@ func (u *DestinationSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byt } func (u DestinationSnowflakeUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20) + if u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil { + return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword) } if u.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication != nil { return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodKeyPairAuthentication) } - if u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil { - return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodUsernameAndPassword) + if u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.DestinationSnowflakeUpdateAuthorizationMethodOAuth20) } return nil, nil @@ -207,14 +207,12 @@ type DestinationSnowflakeUpdate struct { Host string `json:"host"` // Enter the additional properties to pass to the JDBC URL string when connecting to the database (formatted as key=value pairs separated by the symbol &). Example: key1=value1&key2=value2&key3=value3 JdbcURLParams *string `json:"jdbc_url_params,omitempty"` - // (Beta) The schema to write raw tables into + // The schema to write raw tables into RawDataSchema *string `json:"raw_data_schema,omitempty"` // Enter the role that you want to use to access Snowflake Role string `json:"role"` // Enter the name of the default schema Schema string `json:"schema"` - // (Beta) Use Destinations V2. Contact Airbyte Support to participate in the beta program. - Use1s1tFormat *bool `json:"use_1s1t_format,omitempty"` // Enter the name of the user you want to use to access the database Username string `json:"username"` // Enter the name of the warehouse that you want to sync data into diff --git a/internal/sdk/pkg/models/shared/destinationsresponse.go b/internal/sdk/pkg/models/shared/destinationsresponse.go index 250b80848..ec9a40233 100755 --- a/internal/sdk/pkg/models/shared/destinationsresponse.go +++ b/internal/sdk/pkg/models/shared/destinationsresponse.go @@ -2,7 +2,6 @@ package shared -// DestinationsResponse - Successful operation type DestinationsResponse struct { Data []DestinationResponse `json:"data"` Next *string `json:"next,omitempty"` diff --git a/internal/sdk/pkg/models/shared/destinationtypesense.go b/internal/sdk/pkg/models/shared/destinationtypesense.go index 51e4e79cb..ed06be1c1 100755 --- a/internal/sdk/pkg/models/shared/destinationtypesense.go +++ b/internal/sdk/pkg/models/shared/destinationtypesense.go @@ -35,7 +35,7 @@ type DestinationTypesense struct { // Typesense API Key APIKey string `json:"api_key"` // How many documents should be imported together. 
Default 1000 - BatchSize *string `json:"batch_size,omitempty"` + BatchSize *int64 `json:"batch_size,omitempty"` DestinationType DestinationTypesenseTypesense `json:"destinationType"` // Hostname of the Typesense instance without protocol. Host string `json:"host"` diff --git a/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go b/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go index 528f09651..e53ad1135 100755 --- a/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go +++ b/internal/sdk/pkg/models/shared/destinationtypesenseupdate.go @@ -6,7 +6,7 @@ type DestinationTypesenseUpdate struct { // Typesense API Key APIKey string `json:"api_key"` // How many documents should be imported together. Default 1000 - BatchSize *string `json:"batch_size,omitempty"` + BatchSize *int64 `json:"batch_size,omitempty"` // Hostname of the Typesense instance without protocol. Host string `json:"host"` // Port of the Typesense instance. Ex: 8108, 80, 443. Default is 443 diff --git a/internal/sdk/pkg/models/shared/jobsresponse.go b/internal/sdk/pkg/models/shared/jobsresponse.go index 52b799532..26501596d 100755 --- a/internal/sdk/pkg/models/shared/jobsresponse.go +++ b/internal/sdk/pkg/models/shared/jobsresponse.go @@ -2,7 +2,6 @@ package shared -// JobsResponse - List all the Jobs by connectionId. type JobsResponse struct { Data []JobResponse `json:"data"` Next *string `json:"next,omitempty"` diff --git a/internal/sdk/pkg/models/shared/jobstatusenum.go b/internal/sdk/pkg/models/shared/jobstatusenum.go index acc749fc4..38a1edfd4 100755 --- a/internal/sdk/pkg/models/shared/jobstatusenum.go +++ b/internal/sdk/pkg/models/shared/jobstatusenum.go @@ -7,7 +7,6 @@ import ( "fmt" ) -// JobStatusEnum - The Job status you want to filter by type JobStatusEnum string const ( diff --git a/internal/sdk/pkg/models/shared/sourceairtable.go b/internal/sdk/pkg/models/shared/sourceairtable.go index 7bd91a52b..21eda702c 100755 --- a/internal/sdk/pkg/models/shared/sourceairtable.go +++ b/internal/sdk/pkg/models/shared/sourceairtable.go @@ -113,21 +113,21 @@ func CreateSourceAirtableAuthenticationSourceAirtableAuthenticationPersonalAcces func (u *SourceAirtableAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceAirtableAuthenticationOAuth20 := new(SourceAirtableAuthenticationOAuth20) + sourceAirtableAuthenticationPersonalAccessToken := new(SourceAirtableAuthenticationPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAirtableAuthenticationOAuth20); err == nil { - u.SourceAirtableAuthenticationOAuth20 = sourceAirtableAuthenticationOAuth20 - u.Type = SourceAirtableAuthenticationTypeSourceAirtableAuthenticationOAuth20 + if err := d.Decode(&sourceAirtableAuthenticationPersonalAccessToken); err == nil { + u.SourceAirtableAuthenticationPersonalAccessToken = sourceAirtableAuthenticationPersonalAccessToken + u.Type = SourceAirtableAuthenticationTypeSourceAirtableAuthenticationPersonalAccessToken return nil } - sourceAirtableAuthenticationPersonalAccessToken := new(SourceAirtableAuthenticationPersonalAccessToken) + sourceAirtableAuthenticationOAuth20 := new(SourceAirtableAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAirtableAuthenticationPersonalAccessToken); err == nil { - u.SourceAirtableAuthenticationPersonalAccessToken = sourceAirtableAuthenticationPersonalAccessToken - u.Type = 
SourceAirtableAuthenticationTypeSourceAirtableAuthenticationPersonalAccessToken + if err := d.Decode(&sourceAirtableAuthenticationOAuth20); err == nil { + u.SourceAirtableAuthenticationOAuth20 = sourceAirtableAuthenticationOAuth20 + u.Type = SourceAirtableAuthenticationTypeSourceAirtableAuthenticationOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceAirtableAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceAirtableAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceAirtableAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceAirtableAuthenticationOAuth20) - } - if u.SourceAirtableAuthenticationPersonalAccessToken != nil { return json.Marshal(u.SourceAirtableAuthenticationPersonalAccessToken) } + if u.SourceAirtableAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceAirtableAuthenticationOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceairtableupdate.go b/internal/sdk/pkg/models/shared/sourceairtableupdate.go index bb1537020..3e58e82e5 100755 --- a/internal/sdk/pkg/models/shared/sourceairtableupdate.go +++ b/internal/sdk/pkg/models/shared/sourceairtableupdate.go @@ -113,21 +113,21 @@ func CreateSourceAirtableUpdateAuthenticationSourceAirtableUpdateAuthenticationP func (u *SourceAirtableUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceAirtableUpdateAuthenticationOAuth20 := new(SourceAirtableUpdateAuthenticationOAuth20) + sourceAirtableUpdateAuthenticationPersonalAccessToken := new(SourceAirtableUpdateAuthenticationPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAirtableUpdateAuthenticationOAuth20); err == nil { - u.SourceAirtableUpdateAuthenticationOAuth20 = sourceAirtableUpdateAuthenticationOAuth20 - u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationOAuth20 + if err := d.Decode(&sourceAirtableUpdateAuthenticationPersonalAccessToken); err == nil { + u.SourceAirtableUpdateAuthenticationPersonalAccessToken = sourceAirtableUpdateAuthenticationPersonalAccessToken + u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationPersonalAccessToken return nil } - sourceAirtableUpdateAuthenticationPersonalAccessToken := new(SourceAirtableUpdateAuthenticationPersonalAccessToken) + sourceAirtableUpdateAuthenticationOAuth20 := new(SourceAirtableUpdateAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAirtableUpdateAuthenticationPersonalAccessToken); err == nil { - u.SourceAirtableUpdateAuthenticationPersonalAccessToken = sourceAirtableUpdateAuthenticationPersonalAccessToken - u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationPersonalAccessToken + if err := d.Decode(&sourceAirtableUpdateAuthenticationOAuth20); err == nil { + u.SourceAirtableUpdateAuthenticationOAuth20 = sourceAirtableUpdateAuthenticationOAuth20 + u.Type = SourceAirtableUpdateAuthenticationTypeSourceAirtableUpdateAuthenticationOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceAirtableUpdateAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceAirtableUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceAirtableUpdateAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceAirtableUpdateAuthenticationOAuth20) - } - if u.SourceAirtableUpdateAuthenticationPersonalAccessToken != nil { return json.Marshal(u.SourceAirtableUpdateAuthenticationPersonalAccessToken) } + if 
u.SourceAirtableUpdateAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceAirtableUpdateAuthenticationOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcealloydb.go b/internal/sdk/pkg/models/shared/sourcealloydb.go index 737fffcfb..58e429820 100755 --- a/internal/sdk/pkg/models/shared/sourcealloydb.go +++ b/internal/sdk/pkg/models/shared/sourcealloydb.go @@ -268,21 +268,21 @@ func (u *SourceAlloydbReplicationMethod) UnmarshalJSON(data []byte) error { return nil } - sourceAlloydbReplicationMethodLogicalReplicationCDC := new(SourceAlloydbReplicationMethodLogicalReplicationCDC) + sourceAlloydbReplicationMethodStandard := new(SourceAlloydbReplicationMethodStandard) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAlloydbReplicationMethodLogicalReplicationCDC); err == nil { - u.SourceAlloydbReplicationMethodLogicalReplicationCDC = sourceAlloydbReplicationMethodLogicalReplicationCDC - u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodLogicalReplicationCDC + if err := d.Decode(&sourceAlloydbReplicationMethodStandard); err == nil { + u.SourceAlloydbReplicationMethodStandard = sourceAlloydbReplicationMethodStandard + u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandard return nil } - sourceAlloydbReplicationMethodStandard := new(SourceAlloydbReplicationMethodStandard) + sourceAlloydbReplicationMethodLogicalReplicationCDC := new(SourceAlloydbReplicationMethodLogicalReplicationCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAlloydbReplicationMethodStandard); err == nil { - u.SourceAlloydbReplicationMethodStandard = sourceAlloydbReplicationMethodStandard - u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodStandard + if err := d.Decode(&sourceAlloydbReplicationMethodLogicalReplicationCDC); err == nil { + u.SourceAlloydbReplicationMethodLogicalReplicationCDC = sourceAlloydbReplicationMethodLogicalReplicationCDC + u.Type = SourceAlloydbReplicationMethodTypeSourceAlloydbReplicationMethodLogicalReplicationCDC return nil } @@ -294,14 +294,14 @@ func (u SourceAlloydbReplicationMethod) MarshalJSON() ([]byte, error) { return json.Marshal(u.SourceAlloydbReplicationMethodStandardXmin) } - if u.SourceAlloydbReplicationMethodLogicalReplicationCDC != nil { - return json.Marshal(u.SourceAlloydbReplicationMethodLogicalReplicationCDC) - } - if u.SourceAlloydbReplicationMethodStandard != nil { return json.Marshal(u.SourceAlloydbReplicationMethodStandard) } + if u.SourceAlloydbReplicationMethodLogicalReplicationCDC != nil { + return json.Marshal(u.SourceAlloydbReplicationMethodLogicalReplicationCDC) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcealloydbupdate.go b/internal/sdk/pkg/models/shared/sourcealloydbupdate.go index 532463720..a385ff6d1 100755 --- a/internal/sdk/pkg/models/shared/sourcealloydbupdate.go +++ b/internal/sdk/pkg/models/shared/sourcealloydbupdate.go @@ -268,21 +268,21 @@ func (u *SourceAlloydbUpdateReplicationMethod) UnmarshalJSON(data []byte) error return nil } - sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC := new(SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) + sourceAlloydbUpdateReplicationMethodStandard := new(SourceAlloydbUpdateReplicationMethodStandard) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC); err == nil { - 
u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC = sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC - u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodLogicalReplicationCDC + if err := d.Decode(&sourceAlloydbUpdateReplicationMethodStandard); err == nil { + u.SourceAlloydbUpdateReplicationMethodStandard = sourceAlloydbUpdateReplicationMethodStandard + u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandard return nil } - sourceAlloydbUpdateReplicationMethodStandard := new(SourceAlloydbUpdateReplicationMethodStandard) + sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC := new(SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAlloydbUpdateReplicationMethodStandard); err == nil { - u.SourceAlloydbUpdateReplicationMethodStandard = sourceAlloydbUpdateReplicationMethodStandard - u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodStandard + if err := d.Decode(&sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC); err == nil { + u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC = sourceAlloydbUpdateReplicationMethodLogicalReplicationCDC + u.Type = SourceAlloydbUpdateReplicationMethodTypeSourceAlloydbUpdateReplicationMethodLogicalReplicationCDC return nil } @@ -294,14 +294,14 @@ func (u SourceAlloydbUpdateReplicationMethod) MarshalJSON() ([]byte, error) { return json.Marshal(u.SourceAlloydbUpdateReplicationMethodStandardXmin) } - if u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC != nil { - return json.Marshal(u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) - } - if u.SourceAlloydbUpdateReplicationMethodStandard != nil { return json.Marshal(u.SourceAlloydbUpdateReplicationMethodStandard) } + if u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC != nil { + return json.Marshal(u.SourceAlloydbUpdateReplicationMethodLogicalReplicationCDC) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceamazonads.go b/internal/sdk/pkg/models/shared/sourceamazonads.go index e52bd82db..d2fa1f1f9 100755 --- a/internal/sdk/pkg/models/shared/sourceamazonads.go +++ b/internal/sdk/pkg/models/shared/sourceamazonads.go @@ -169,7 +169,9 @@ type SourceAmazonAds struct { ClientSecret string `json:"client_secret"` // The amount of days to go back in time to get the updated data from Amazon Ads LookBackWindow *int64 `json:"look_back_window,omitempty"` - // Profile IDs you want to fetch data for. See docs for more details. + // Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. + MarketplaceIds []string `json:"marketplace_ids,omitempty"` + // Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. Profiles []int64 `json:"profiles,omitempty"` // Amazon Ads refresh token. See the docs for more information on how to obtain this token. 
RefreshToken string `json:"refresh_token"` diff --git a/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go b/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go index 9ccd45f3d..4087cc4ae 100755 --- a/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go +++ b/internal/sdk/pkg/models/shared/sourceamazonadsupdate.go @@ -145,7 +145,9 @@ type SourceAmazonAdsUpdate struct { ClientSecret string `json:"client_secret"` // The amount of days to go back in time to get the updated data from Amazon Ads LookBackWindow *int64 `json:"look_back_window,omitempty"` - // Profile IDs you want to fetch data for. See docs for more details. + // Marketplace IDs you want to fetch data for. Note: If Profile IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. + MarketplaceIds []string `json:"marketplace_ids,omitempty"` + // Profile IDs you want to fetch data for. See docs for more details. Note: If Marketplace IDs are also selected, profiles will be selected if they match the Profile ID OR the Marketplace ID. Profiles []int64 `json:"profiles,omitempty"` // Amazon Ads refresh token. See the docs for more information on how to obtain this token. RefreshToken string `json:"refresh_token"` diff --git a/internal/sdk/pkg/models/shared/sourceapifydataset.go b/internal/sdk/pkg/models/shared/sourceapifydataset.go index 25069a4f9..ac5137d45 100755 --- a/internal/sdk/pkg/models/shared/sourceapifydataset.go +++ b/internal/sdk/pkg/models/shared/sourceapifydataset.go @@ -35,6 +35,8 @@ type SourceApifyDataset struct { // If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false. Clean *bool `json:"clean,omitempty"` // ID of the dataset you would like to load to Airbyte. - DatasetID string `json:"datasetId"` + DatasetID *string `json:"datasetId,omitempty"` SourceType SourceApifyDatasetApifyDataset `json:"sourceType"` + // Your application's Client Secret. You can find this value on the console integrations tab after you login. + Token string `json:"token"` } diff --git a/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go b/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go index 791dde232..750eddd04 100755 --- a/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go +++ b/internal/sdk/pkg/models/shared/sourceapifydatasetupdate.go @@ -6,5 +6,7 @@ type SourceApifyDatasetUpdate struct { // If set to true, only clean items will be downloaded from the dataset. See description of what clean means in Apify API docs. If not sure, set clean to false. Clean *bool `json:"clean,omitempty"` // ID of the dataset you would like to load to Airbyte. - DatasetID string `json:"datasetId"` + DatasetID *string `json:"datasetId,omitempty"` + // Your application's Client Secret. You can find this value on the console integrations tab after you login. 
+ Token string `json:"token"` } diff --git a/internal/sdk/pkg/models/shared/sourceasana.go b/internal/sdk/pkg/models/shared/sourceasana.go index a1ce21524..ced0f41a3 100755 --- a/internal/sdk/pkg/models/shared/sourceasana.go +++ b/internal/sdk/pkg/models/shared/sourceasana.go @@ -111,21 +111,21 @@ func CreateSourceAsanaAuthenticationMechanismSourceAsanaAuthenticationMechanismA func (u *SourceAsanaAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth := new(SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth) + sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth); err == nil { - u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth = sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth - u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth + if err := d.Decode(&sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { + u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken + u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken return nil } - sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken) + sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth := new(SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { - u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken - u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken + if err := d.Decode(&sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth); err == nil { + u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth = sourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth + u.Type = SourceAsanaAuthenticationMechanismTypeSourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth return nil } @@ -133,14 +133,14 @@ func (u *SourceAsanaAuthenticationMechanism) UnmarshalJSON(data []byte) error { } func (u SourceAsanaAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth != nil { - return json.Marshal(u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth) - } - if u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil { return json.Marshal(u.SourceAsanaAuthenticationMechanismAuthenticateWithPersonalAccessToken) } + if u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth != nil { + return json.Marshal(u.SourceAsanaAuthenticationMechanismAuthenticateViaAsanaOauth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceasanaupdate.go b/internal/sdk/pkg/models/shared/sourceasanaupdate.go index 2ccc5e019..0ef1dc6b2 100755 --- a/internal/sdk/pkg/models/shared/sourceasanaupdate.go +++ 
b/internal/sdk/pkg/models/shared/sourceasanaupdate.go @@ -111,21 +111,21 @@ func CreateSourceAsanaUpdateAuthenticationMechanismSourceAsanaUpdateAuthenticati func (u *SourceAsanaUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth := new(SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth) + sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth); err == nil { - u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth = sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth - u.Type = SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth + if err := d.Decode(&sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { + u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken + u.Type = SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken return nil } - sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) + sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth := new(SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { - u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken - u.Type = SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken + if err := d.Decode(&sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth); err == nil { + u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth = sourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth + u.Type = SourceAsanaUpdateAuthenticationMechanismTypeSourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth return nil } @@ -133,14 +133,14 @@ func (u *SourceAsanaUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) er } func (u SourceAsanaUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth != nil { - return json.Marshal(u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth) - } - if u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil { return json.Marshal(u.SourceAsanaUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) } + if u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth != nil { + return json.Marshal(u.SourceAsanaUpdateAuthenticationMechanismAuthenticateViaAsanaOauth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceauth0.go b/internal/sdk/pkg/models/shared/sourceauth0.go index 27823eb69..ee9c68f65 100755 --- a/internal/sdk/pkg/models/shared/sourceauth0.go +++ 
b/internal/sdk/pkg/models/shared/sourceauth0.go @@ -108,21 +108,21 @@ func CreateSourceAuth0AuthenticationMethodSourceAuth0AuthenticationMethodOAuth2A func (u *SourceAuth0AuthenticationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication := new(SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication) + sourceAuth0AuthenticationMethodOAuth2AccessToken := new(SourceAuth0AuthenticationMethodOAuth2AccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication); err == nil { - u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication = sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication - u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2ConfidentialApplication + if err := d.Decode(&sourceAuth0AuthenticationMethodOAuth2AccessToken); err == nil { + u.SourceAuth0AuthenticationMethodOAuth2AccessToken = sourceAuth0AuthenticationMethodOAuth2AccessToken + u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2AccessToken return nil } - sourceAuth0AuthenticationMethodOAuth2AccessToken := new(SourceAuth0AuthenticationMethodOAuth2AccessToken) + sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication := new(SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAuth0AuthenticationMethodOAuth2AccessToken); err == nil { - u.SourceAuth0AuthenticationMethodOAuth2AccessToken = sourceAuth0AuthenticationMethodOAuth2AccessToken - u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2AccessToken + if err := d.Decode(&sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication); err == nil { + u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication = sourceAuth0AuthenticationMethodOAuth2ConfidentialApplication + u.Type = SourceAuth0AuthenticationMethodTypeSourceAuth0AuthenticationMethodOAuth2ConfidentialApplication return nil } @@ -130,14 +130,14 @@ func (u *SourceAuth0AuthenticationMethod) UnmarshalJSON(data []byte) error { } func (u SourceAuth0AuthenticationMethod) MarshalJSON() ([]byte, error) { - if u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication != nil { - return json.Marshal(u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication) - } - if u.SourceAuth0AuthenticationMethodOAuth2AccessToken != nil { return json.Marshal(u.SourceAuth0AuthenticationMethodOAuth2AccessToken) } + if u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication != nil { + return json.Marshal(u.SourceAuth0AuthenticationMethodOAuth2ConfidentialApplication) + } + return nil, nil } @@ -170,4 +170,6 @@ type SourceAuth0 struct { BaseURL string `json:"base_url"` Credentials SourceAuth0AuthenticationMethod `json:"credentials"` SourceType SourceAuth0Auth0 `json:"sourceType"` + // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. 
+ StartDate *string `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourceauth0update.go b/internal/sdk/pkg/models/shared/sourceauth0update.go index e7d6c85f3..6b35171d2 100755 --- a/internal/sdk/pkg/models/shared/sourceauth0update.go +++ b/internal/sdk/pkg/models/shared/sourceauth0update.go @@ -108,21 +108,21 @@ func CreateSourceAuth0UpdateAuthenticationMethodSourceAuth0UpdateAuthenticationM func (u *SourceAuth0UpdateAuthenticationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication := new(SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication) + sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken := new(SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication); err == nil { - u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication = sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication - u.Type = SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication + if err := d.Decode(&sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken); err == nil { + u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken = sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken + u.Type = SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken return nil } - sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken := new(SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken) + sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication := new(SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken); err == nil { - u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken = sourceAuth0UpdateAuthenticationMethodOAuth2AccessToken - u.Type = SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2AccessToken + if err := d.Decode(&sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication); err == nil { + u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication = sourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication + u.Type = SourceAuth0UpdateAuthenticationMethodTypeSourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication return nil } @@ -130,14 +130,14 @@ func (u *SourceAuth0UpdateAuthenticationMethod) UnmarshalJSON(data []byte) error } func (u SourceAuth0UpdateAuthenticationMethod) MarshalJSON() ([]byte, error) { - if u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication != nil { - return json.Marshal(u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication) - } - if u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken != nil { return json.Marshal(u.SourceAuth0UpdateAuthenticationMethodOAuth2AccessToken) } + if u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication != nil { + return json.Marshal(u.SourceAuth0UpdateAuthenticationMethodOAuth2ConfidentialApplication) + } + return nil, nil } @@ -145,4 +145,6 @@ type SourceAuth0Update struct { // The Authentication API is served over HTTPS. 
All URLs referenced in the documentation have the following base `https://YOUR_DOMAIN` BaseURL string `json:"base_url"` Credentials SourceAuth0UpdateAuthenticationMethod `json:"credentials"` + // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. + StartDate *string `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcedatadog.go b/internal/sdk/pkg/models/shared/sourcedatadog.go deleted file mode 100755 index d8fa60fa0..000000000 --- a/internal/sdk/pkg/models/shared/sourcedatadog.go +++ /dev/null @@ -1,132 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -import ( - "encoding/json" - "fmt" -) - -// SourceDatadogQueriesDataSource - A data source that is powered by the platform. -type SourceDatadogQueriesDataSource string - -const ( - SourceDatadogQueriesDataSourceMetrics SourceDatadogQueriesDataSource = "metrics" - SourceDatadogQueriesDataSourceCloudCost SourceDatadogQueriesDataSource = "cloud_cost" - SourceDatadogQueriesDataSourceLogs SourceDatadogQueriesDataSource = "logs" - SourceDatadogQueriesDataSourceRum SourceDatadogQueriesDataSource = "rum" -) - -func (e SourceDatadogQueriesDataSource) ToPointer() *SourceDatadogQueriesDataSource { - return &e -} - -func (e *SourceDatadogQueriesDataSource) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "metrics": - fallthrough - case "cloud_cost": - fallthrough - case "logs": - fallthrough - case "rum": - *e = SourceDatadogQueriesDataSource(v) - return nil - default: - return fmt.Errorf("invalid value for SourceDatadogQueriesDataSource: %v", v) - } -} - -type SourceDatadogQueries struct { - // A data source that is powered by the platform. - DataSource SourceDatadogQueriesDataSource `json:"data_source"` - // The variable name for use in queries. - Name string `json:"name"` - // A classic query string. - Query string `json:"query"` -} - -// SourceDatadogSite - The site where Datadog data resides in. 
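The recurring change in this patch, first visible in sourceasanaupdate.go and sourceauth0.go above and repeated below for the E2E test, GitHub, GitLab, Google Analytics, Google Directory, and Google Search Console models, is a reordering of the generated oneOf handling: UnmarshalJSON decodes the raw payload against each variant in turn with DisallowUnknownFields and keeps the first variant that decodes cleanly, and MarshalJSON checks the variants in the same order. The patch moves the token/service-account style variant ahead of the OAuth variant in both directions. The sketch below is a minimal, self-contained illustration of that try-in-order pattern using two hypothetical variants (TokenAuth and OAuth); it is not the generated SDK code.

```go
package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
)

// Two hypothetical credential variants standing in for the generated
// "personal access token" and "OAuth" option structs.
type TokenAuth struct {
	AuthType string `json:"auth_type"`
	Token    string `json:"personal_access_token"`
}

type OAuth struct {
	AuthType     string `json:"auth_type"`
	ClientID     string `json:"client_id"`
	ClientSecret string `json:"client_secret"`
}

// Credentials mirrors the generated union: at most one variant is non-nil.
type Credentials struct {
	TokenAuth *TokenAuth
	OAuth     *OAuth
}

// UnmarshalJSON tries each variant in order with DisallowUnknownFields and
// keeps the first one that decodes cleanly -- the same strategy the patch
// reorders so the token variant is attempted before the OAuth variant.
func (c *Credentials) UnmarshalJSON(data []byte) error {
	tok := new(TokenAuth)
	d := json.NewDecoder(bytes.NewReader(data))
	d.DisallowUnknownFields()
	if err := d.Decode(tok); err == nil {
		c.TokenAuth = tok
		return nil
	}

	oa := new(OAuth)
	d = json.NewDecoder(bytes.NewReader(data))
	d.DisallowUnknownFields()
	if err := d.Decode(oa); err == nil {
		c.OAuth = oa
		return nil
	}
	return errors.New("could not unmarshal into any supported variant")
}

// MarshalJSON checks the variants in the same order, as the patched code does.
func (c Credentials) MarshalJSON() ([]byte, error) {
	if c.TokenAuth != nil {
		return json.Marshal(c.TokenAuth)
	}
	if c.OAuth != nil {
		return json.Marshal(c.OAuth)
	}
	return nil, nil
}

func main() {
	var c Credentials
	_ = json.Unmarshal([]byte(`{"auth_type":"token","personal_access_token":"pat-123"}`), &c)
	fmt.Println(c.TokenAuth != nil, c.OAuth == nil) // true true
}
```

Because DisallowUnknownFields rejects a payload that contains fields a variant does not declare, the order only matters when one variant's field set could also satisfy another; trying the narrower variant first keeps it from being claimed by a broader one. The patch itself does not state the motivation, but that is the usual reason for this kind of reordering.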
-type SourceDatadogSite string - -const ( - SourceDatadogSiteDatadoghqCom SourceDatadogSite = "datadoghq.com" - SourceDatadogSiteUs3DatadoghqCom SourceDatadogSite = "us3.datadoghq.com" - SourceDatadogSiteUs5DatadoghqCom SourceDatadogSite = "us5.datadoghq.com" - SourceDatadogSiteDatadoghqEu SourceDatadogSite = "datadoghq.eu" - SourceDatadogSiteDdogGovCom SourceDatadogSite = "ddog-gov.com" -) - -func (e SourceDatadogSite) ToPointer() *SourceDatadogSite { - return &e -} - -func (e *SourceDatadogSite) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "datadoghq.com": - fallthrough - case "us3.datadoghq.com": - fallthrough - case "us5.datadoghq.com": - fallthrough - case "datadoghq.eu": - fallthrough - case "ddog-gov.com": - *e = SourceDatadogSite(v) - return nil - default: - return fmt.Errorf("invalid value for SourceDatadogSite: %v", v) - } -} - -type SourceDatadogDatadog string - -const ( - SourceDatadogDatadogDatadog SourceDatadogDatadog = "datadog" -) - -func (e SourceDatadogDatadog) ToPointer() *SourceDatadogDatadog { - return &e -} - -func (e *SourceDatadogDatadog) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "datadog": - *e = SourceDatadogDatadog(v) - return nil - default: - return fmt.Errorf("invalid value for SourceDatadogDatadog: %v", v) - } -} - -type SourceDatadog struct { - // Datadog API key - APIKey string `json:"api_key"` - // Datadog application key - ApplicationKey string `json:"application_key"` - // UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. - EndDate *string `json:"end_date,omitempty"` - // Maximum number of records to collect per request. - MaxRecordsPerRequest *int64 `json:"max_records_per_request,omitempty"` - // List of queries to be run and used as inputs. - Queries []SourceDatadogQueries `json:"queries,omitempty"` - // The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. - Query *string `json:"query,omitempty"` - // The site where Datadog data resides in. - Site *SourceDatadogSite `json:"site,omitempty"` - SourceType SourceDatadogDatadog `json:"sourceType"` - // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. - StartDate *string `json:"start_date,omitempty"` -} diff --git a/internal/sdk/pkg/models/shared/sourcedatadogcreaterequest.go b/internal/sdk/pkg/models/shared/sourcedatadogcreaterequest.go deleted file mode 100755 index acee89d30..000000000 --- a/internal/sdk/pkg/models/shared/sourcedatadogcreaterequest.go +++ /dev/null @@ -1,11 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -type SourceDatadogCreateRequest struct { - Configuration SourceDatadog `json:"configuration"` - Name string `json:"name"` - // Optional secretID obtained through the public API OAuth redirect flow. 
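Alongside the reordering, SourceAuth0 and SourceAuth0Update gain an optional start_date, and further down the Google Ads and Google Search Console start_date/end_date fields are relaxed from required values to optional pointers with documented defaults. Since the fields are pointers tagged omitempty, a nil value omits the key so the connector falls back to whatever default the spec documents (the Google Ads comments name "today" and "two years ago"), while a non-nil value pins the replication window. A minimal sketch of that wire-level effect, using a simplified stand-in struct rather than the SDK types:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Simplified stand-in for the patched config structs: start_date is an
// optional pointer with omitempty instead of a required value.
type sourceConfig struct {
	BaseURL   string  `json:"base_url"`
	StartDate *string `json:"start_date,omitempty"`
}

func strPtr(s string) *string { return &s }

func main() {
	// nil pointer: the key is omitted entirely, so the connector falls back
	// to whatever default the spec documents for that source.
	withDefault, _ := json.Marshal(sourceConfig{BaseURL: "https://YOUR_DOMAIN"})
	fmt.Println(string(withDefault)) // {"base_url":"https://YOUR_DOMAIN"}

	// non-nil pointer: only data on or after this UTC timestamp is replicated.
	pinned, _ := json.Marshal(sourceConfig{
		BaseURL:   "https://YOUR_DOMAIN",
		StartDate: strPtr("2017-01-25T00:00:00Z"),
	})
	fmt.Println(string(pinned)) // {"base_url":"https://YOUR_DOMAIN","start_date":"2017-01-25T00:00:00Z"}
}
```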
- SecretID *string `json:"secretId,omitempty"` - WorkspaceID string `json:"workspaceId"` -} diff --git a/internal/sdk/pkg/models/shared/sourcedatadogupdate.go b/internal/sdk/pkg/models/shared/sourcedatadogupdate.go deleted file mode 100755 index 0ae005c9d..000000000 --- a/internal/sdk/pkg/models/shared/sourcedatadogupdate.go +++ /dev/null @@ -1,107 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -import ( - "encoding/json" - "fmt" -) - -// SourceDatadogUpdateQueriesDataSource - A data source that is powered by the platform. -type SourceDatadogUpdateQueriesDataSource string - -const ( - SourceDatadogUpdateQueriesDataSourceMetrics SourceDatadogUpdateQueriesDataSource = "metrics" - SourceDatadogUpdateQueriesDataSourceCloudCost SourceDatadogUpdateQueriesDataSource = "cloud_cost" - SourceDatadogUpdateQueriesDataSourceLogs SourceDatadogUpdateQueriesDataSource = "logs" - SourceDatadogUpdateQueriesDataSourceRum SourceDatadogUpdateQueriesDataSource = "rum" -) - -func (e SourceDatadogUpdateQueriesDataSource) ToPointer() *SourceDatadogUpdateQueriesDataSource { - return &e -} - -func (e *SourceDatadogUpdateQueriesDataSource) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "metrics": - fallthrough - case "cloud_cost": - fallthrough - case "logs": - fallthrough - case "rum": - *e = SourceDatadogUpdateQueriesDataSource(v) - return nil - default: - return fmt.Errorf("invalid value for SourceDatadogUpdateQueriesDataSource: %v", v) - } -} - -type SourceDatadogUpdateQueries struct { - // A data source that is powered by the platform. - DataSource SourceDatadogUpdateQueriesDataSource `json:"data_source"` - // The variable name for use in queries. - Name string `json:"name"` - // A classic query string. - Query string `json:"query"` -} - -// SourceDatadogUpdateSite - The site where Datadog data resides in. -type SourceDatadogUpdateSite string - -const ( - SourceDatadogUpdateSiteDatadoghqCom SourceDatadogUpdateSite = "datadoghq.com" - SourceDatadogUpdateSiteUs3DatadoghqCom SourceDatadogUpdateSite = "us3.datadoghq.com" - SourceDatadogUpdateSiteUs5DatadoghqCom SourceDatadogUpdateSite = "us5.datadoghq.com" - SourceDatadogUpdateSiteDatadoghqEu SourceDatadogUpdateSite = "datadoghq.eu" - SourceDatadogUpdateSiteDdogGovCom SourceDatadogUpdateSite = "ddog-gov.com" -) - -func (e SourceDatadogUpdateSite) ToPointer() *SourceDatadogUpdateSite { - return &e -} - -func (e *SourceDatadogUpdateSite) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "datadoghq.com": - fallthrough - case "us3.datadoghq.com": - fallthrough - case "us5.datadoghq.com": - fallthrough - case "datadoghq.eu": - fallthrough - case "ddog-gov.com": - *e = SourceDatadogUpdateSite(v) - return nil - default: - return fmt.Errorf("invalid value for SourceDatadogUpdateSite: %v", v) - } -} - -type SourceDatadogUpdate struct { - // Datadog API key - APIKey string `json:"api_key"` - // Datadog application key - ApplicationKey string `json:"application_key"` - // UTC date and time in the format 2017-01-25T00:00:00Z. Data after this date will not be replicated. An empty value will represent the current datetime for each execution. This just applies to Incremental syncs. - EndDate *string `json:"end_date,omitempty"` - // Maximum number of records to collect per request. 
- MaxRecordsPerRequest *int64 `json:"max_records_per_request,omitempty"` - // List of queries to be run and used as inputs. - Queries []SourceDatadogUpdateQueries `json:"queries,omitempty"` - // The search query. This just applies to Incremental syncs. If empty, it'll collect all logs. - Query *string `json:"query,omitempty"` - // The site where Datadog data resides in. - Site *SourceDatadogUpdateSite `json:"site,omitempty"` - // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. This just applies to Incremental syncs. - StartDate *string `json:"start_date,omitempty"` -} diff --git a/internal/sdk/pkg/models/shared/sourcee2etestcloud.go b/internal/sdk/pkg/models/shared/sourcee2etestcloud.go index 118a2eb8c..fa916fe3a 100755 --- a/internal/sdk/pkg/models/shared/sourcee2etestcloud.go +++ b/internal/sdk/pkg/models/shared/sourcee2etestcloud.go @@ -110,21 +110,21 @@ func CreateSourceE2eTestCloudMockCatalogSourceE2eTestCloudMockCatalogMultiSchema func (u *SourceE2eTestCloudMockCatalog) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceE2eTestCloudMockCatalogSingleSchema := new(SourceE2eTestCloudMockCatalogSingleSchema) + sourceE2eTestCloudMockCatalogMultiSchema := new(SourceE2eTestCloudMockCatalogMultiSchema) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceE2eTestCloudMockCatalogSingleSchema); err == nil { - u.SourceE2eTestCloudMockCatalogSingleSchema = sourceE2eTestCloudMockCatalogSingleSchema - u.Type = SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogSingleSchema + if err := d.Decode(&sourceE2eTestCloudMockCatalogMultiSchema); err == nil { + u.SourceE2eTestCloudMockCatalogMultiSchema = sourceE2eTestCloudMockCatalogMultiSchema + u.Type = SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogMultiSchema return nil } - sourceE2eTestCloudMockCatalogMultiSchema := new(SourceE2eTestCloudMockCatalogMultiSchema) + sourceE2eTestCloudMockCatalogSingleSchema := new(SourceE2eTestCloudMockCatalogSingleSchema) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceE2eTestCloudMockCatalogMultiSchema); err == nil { - u.SourceE2eTestCloudMockCatalogMultiSchema = sourceE2eTestCloudMockCatalogMultiSchema - u.Type = SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogMultiSchema + if err := d.Decode(&sourceE2eTestCloudMockCatalogSingleSchema); err == nil { + u.SourceE2eTestCloudMockCatalogSingleSchema = sourceE2eTestCloudMockCatalogSingleSchema + u.Type = SourceE2eTestCloudMockCatalogTypeSourceE2eTestCloudMockCatalogSingleSchema return nil } @@ -132,14 +132,14 @@ func (u *SourceE2eTestCloudMockCatalog) UnmarshalJSON(data []byte) error { } func (u SourceE2eTestCloudMockCatalog) MarshalJSON() ([]byte, error) { - if u.SourceE2eTestCloudMockCatalogSingleSchema != nil { - return json.Marshal(u.SourceE2eTestCloudMockCatalogSingleSchema) - } - if u.SourceE2eTestCloudMockCatalogMultiSchema != nil { return json.Marshal(u.SourceE2eTestCloudMockCatalogMultiSchema) } + if u.SourceE2eTestCloudMockCatalogSingleSchema != nil { + return json.Marshal(u.SourceE2eTestCloudMockCatalogSingleSchema) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go b/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go index 7c5f5a8a4..314f387b3 100755 --- a/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go +++ b/internal/sdk/pkg/models/shared/sourcee2etestcloudupdate.go @@ -110,21 +110,21 @@ func 
CreateSourceE2eTestCloudUpdateMockCatalogSourceE2eTestCloudUpdateMockCatalo func (u *SourceE2eTestCloudUpdateMockCatalog) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceE2eTestCloudUpdateMockCatalogSingleSchema := new(SourceE2eTestCloudUpdateMockCatalogSingleSchema) + sourceE2eTestCloudUpdateMockCatalogMultiSchema := new(SourceE2eTestCloudUpdateMockCatalogMultiSchema) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceE2eTestCloudUpdateMockCatalogSingleSchema); err == nil { - u.SourceE2eTestCloudUpdateMockCatalogSingleSchema = sourceE2eTestCloudUpdateMockCatalogSingleSchema - u.Type = SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogSingleSchema + if err := d.Decode(&sourceE2eTestCloudUpdateMockCatalogMultiSchema); err == nil { + u.SourceE2eTestCloudUpdateMockCatalogMultiSchema = sourceE2eTestCloudUpdateMockCatalogMultiSchema + u.Type = SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogMultiSchema return nil } - sourceE2eTestCloudUpdateMockCatalogMultiSchema := new(SourceE2eTestCloudUpdateMockCatalogMultiSchema) + sourceE2eTestCloudUpdateMockCatalogSingleSchema := new(SourceE2eTestCloudUpdateMockCatalogSingleSchema) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceE2eTestCloudUpdateMockCatalogMultiSchema); err == nil { - u.SourceE2eTestCloudUpdateMockCatalogMultiSchema = sourceE2eTestCloudUpdateMockCatalogMultiSchema - u.Type = SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogMultiSchema + if err := d.Decode(&sourceE2eTestCloudUpdateMockCatalogSingleSchema); err == nil { + u.SourceE2eTestCloudUpdateMockCatalogSingleSchema = sourceE2eTestCloudUpdateMockCatalogSingleSchema + u.Type = SourceE2eTestCloudUpdateMockCatalogTypeSourceE2eTestCloudUpdateMockCatalogSingleSchema return nil } @@ -132,14 +132,14 @@ func (u *SourceE2eTestCloudUpdateMockCatalog) UnmarshalJSON(data []byte) error { } func (u SourceE2eTestCloudUpdateMockCatalog) MarshalJSON() ([]byte, error) { - if u.SourceE2eTestCloudUpdateMockCatalogSingleSchema != nil { - return json.Marshal(u.SourceE2eTestCloudUpdateMockCatalogSingleSchema) - } - if u.SourceE2eTestCloudUpdateMockCatalogMultiSchema != nil { return json.Marshal(u.SourceE2eTestCloudUpdateMockCatalogMultiSchema) } + if u.SourceE2eTestCloudUpdateMockCatalogSingleSchema != nil { + return json.Marshal(u.SourceE2eTestCloudUpdateMockCatalogSingleSchema) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegithub.go b/internal/sdk/pkg/models/shared/sourcegithub.go index f1f5c18c5..152dca92f 100755 --- a/internal/sdk/pkg/models/shared/sourcegithub.go +++ b/internal/sdk/pkg/models/shared/sourcegithub.go @@ -111,21 +111,21 @@ func CreateSourceGithubAuthenticationSourceGithubAuthenticationPersonalAccessTok func (u *SourceGithubAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGithubAuthenticationOAuth := new(SourceGithubAuthenticationOAuth) + sourceGithubAuthenticationPersonalAccessToken := new(SourceGithubAuthenticationPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGithubAuthenticationOAuth); err == nil { - u.SourceGithubAuthenticationOAuth = sourceGithubAuthenticationOAuth - u.Type = SourceGithubAuthenticationTypeSourceGithubAuthenticationOAuth + if err := d.Decode(&sourceGithubAuthenticationPersonalAccessToken); err == nil { + u.SourceGithubAuthenticationPersonalAccessToken = 
sourceGithubAuthenticationPersonalAccessToken + u.Type = SourceGithubAuthenticationTypeSourceGithubAuthenticationPersonalAccessToken return nil } - sourceGithubAuthenticationPersonalAccessToken := new(SourceGithubAuthenticationPersonalAccessToken) + sourceGithubAuthenticationOAuth := new(SourceGithubAuthenticationOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGithubAuthenticationPersonalAccessToken); err == nil { - u.SourceGithubAuthenticationPersonalAccessToken = sourceGithubAuthenticationPersonalAccessToken - u.Type = SourceGithubAuthenticationTypeSourceGithubAuthenticationPersonalAccessToken + if err := d.Decode(&sourceGithubAuthenticationOAuth); err == nil { + u.SourceGithubAuthenticationOAuth = sourceGithubAuthenticationOAuth + u.Type = SourceGithubAuthenticationTypeSourceGithubAuthenticationOAuth return nil } @@ -133,14 +133,14 @@ func (u *SourceGithubAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceGithubAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceGithubAuthenticationOAuth != nil { - return json.Marshal(u.SourceGithubAuthenticationOAuth) - } - if u.SourceGithubAuthenticationPersonalAccessToken != nil { return json.Marshal(u.SourceGithubAuthenticationPersonalAccessToken) } + if u.SourceGithubAuthenticationOAuth != nil { + return json.Marshal(u.SourceGithubAuthenticationOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegithubupdate.go b/internal/sdk/pkg/models/shared/sourcegithubupdate.go index d31e3c2b7..9f08a7a53 100755 --- a/internal/sdk/pkg/models/shared/sourcegithubupdate.go +++ b/internal/sdk/pkg/models/shared/sourcegithubupdate.go @@ -111,21 +111,21 @@ func CreateSourceGithubUpdateAuthenticationSourceGithubUpdateAuthenticationPerso func (u *SourceGithubUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGithubUpdateAuthenticationOAuth := new(SourceGithubUpdateAuthenticationOAuth) + sourceGithubUpdateAuthenticationPersonalAccessToken := new(SourceGithubUpdateAuthenticationPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGithubUpdateAuthenticationOAuth); err == nil { - u.SourceGithubUpdateAuthenticationOAuth = sourceGithubUpdateAuthenticationOAuth - u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationOAuth + if err := d.Decode(&sourceGithubUpdateAuthenticationPersonalAccessToken); err == nil { + u.SourceGithubUpdateAuthenticationPersonalAccessToken = sourceGithubUpdateAuthenticationPersonalAccessToken + u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationPersonalAccessToken return nil } - sourceGithubUpdateAuthenticationPersonalAccessToken := new(SourceGithubUpdateAuthenticationPersonalAccessToken) + sourceGithubUpdateAuthenticationOAuth := new(SourceGithubUpdateAuthenticationOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGithubUpdateAuthenticationPersonalAccessToken); err == nil { - u.SourceGithubUpdateAuthenticationPersonalAccessToken = sourceGithubUpdateAuthenticationPersonalAccessToken - u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationPersonalAccessToken + if err := d.Decode(&sourceGithubUpdateAuthenticationOAuth); err == nil { + u.SourceGithubUpdateAuthenticationOAuth = sourceGithubUpdateAuthenticationOAuth + u.Type = SourceGithubUpdateAuthenticationTypeSourceGithubUpdateAuthenticationOAuth return nil } @@ -133,14 
+133,14 @@ func (u *SourceGithubUpdateAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceGithubUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceGithubUpdateAuthenticationOAuth != nil { - return json.Marshal(u.SourceGithubUpdateAuthenticationOAuth) - } - if u.SourceGithubUpdateAuthenticationPersonalAccessToken != nil { return json.Marshal(u.SourceGithubUpdateAuthenticationPersonalAccessToken) } + if u.SourceGithubUpdateAuthenticationOAuth != nil { + return json.Marshal(u.SourceGithubUpdateAuthenticationOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegitlab.go b/internal/sdk/pkg/models/shared/sourcegitlab.go index d8de3a499..a8dcf47da 100755 --- a/internal/sdk/pkg/models/shared/sourcegitlab.go +++ b/internal/sdk/pkg/models/shared/sourcegitlab.go @@ -113,21 +113,21 @@ func CreateSourceGitlabAuthorizationMethodSourceGitlabAuthorizationMethodPrivate func (u *SourceGitlabAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGitlabAuthorizationMethodOAuth20 := new(SourceGitlabAuthorizationMethodOAuth20) + sourceGitlabAuthorizationMethodPrivateToken := new(SourceGitlabAuthorizationMethodPrivateToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGitlabAuthorizationMethodOAuth20); err == nil { - u.SourceGitlabAuthorizationMethodOAuth20 = sourceGitlabAuthorizationMethodOAuth20 - u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodOAuth20 + if err := d.Decode(&sourceGitlabAuthorizationMethodPrivateToken); err == nil { + u.SourceGitlabAuthorizationMethodPrivateToken = sourceGitlabAuthorizationMethodPrivateToken + u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodPrivateToken return nil } - sourceGitlabAuthorizationMethodPrivateToken := new(SourceGitlabAuthorizationMethodPrivateToken) + sourceGitlabAuthorizationMethodOAuth20 := new(SourceGitlabAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGitlabAuthorizationMethodPrivateToken); err == nil { - u.SourceGitlabAuthorizationMethodPrivateToken = sourceGitlabAuthorizationMethodPrivateToken - u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodPrivateToken + if err := d.Decode(&sourceGitlabAuthorizationMethodOAuth20); err == nil { + u.SourceGitlabAuthorizationMethodOAuth20 = sourceGitlabAuthorizationMethodOAuth20 + u.Type = SourceGitlabAuthorizationMethodTypeSourceGitlabAuthorizationMethodOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceGitlabAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceGitlabAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceGitlabAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceGitlabAuthorizationMethodOAuth20) - } - if u.SourceGitlabAuthorizationMethodPrivateToken != nil { return json.Marshal(u.SourceGitlabAuthorizationMethodPrivateToken) } + if u.SourceGitlabAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceGitlabAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegitlabupdate.go b/internal/sdk/pkg/models/shared/sourcegitlabupdate.go index 4dd321138..6cf89870f 100755 --- a/internal/sdk/pkg/models/shared/sourcegitlabupdate.go +++ b/internal/sdk/pkg/models/shared/sourcegitlabupdate.go @@ -113,21 +113,21 @@ func CreateSourceGitlabUpdateAuthorizationMethodSourceGitlabUpdateAuthorizationM func (u 
*SourceGitlabUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGitlabUpdateAuthorizationMethodOAuth20 := new(SourceGitlabUpdateAuthorizationMethodOAuth20) + sourceGitlabUpdateAuthorizationMethodPrivateToken := new(SourceGitlabUpdateAuthorizationMethodPrivateToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGitlabUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceGitlabUpdateAuthorizationMethodOAuth20 = sourceGitlabUpdateAuthorizationMethodOAuth20 - u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceGitlabUpdateAuthorizationMethodPrivateToken); err == nil { + u.SourceGitlabUpdateAuthorizationMethodPrivateToken = sourceGitlabUpdateAuthorizationMethodPrivateToken + u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodPrivateToken return nil } - sourceGitlabUpdateAuthorizationMethodPrivateToken := new(SourceGitlabUpdateAuthorizationMethodPrivateToken) + sourceGitlabUpdateAuthorizationMethodOAuth20 := new(SourceGitlabUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGitlabUpdateAuthorizationMethodPrivateToken); err == nil { - u.SourceGitlabUpdateAuthorizationMethodPrivateToken = sourceGitlabUpdateAuthorizationMethodPrivateToken - u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodPrivateToken + if err := d.Decode(&sourceGitlabUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceGitlabUpdateAuthorizationMethodOAuth20 = sourceGitlabUpdateAuthorizationMethodOAuth20 + u.Type = SourceGitlabUpdateAuthorizationMethodTypeSourceGitlabUpdateAuthorizationMethodOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceGitlabUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error } func (u SourceGitlabUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceGitlabUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceGitlabUpdateAuthorizationMethodOAuth20) - } - if u.SourceGitlabUpdateAuthorizationMethodPrivateToken != nil { return json.Marshal(u.SourceGitlabUpdateAuthorizationMethodPrivateToken) } + if u.SourceGitlabUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceGitlabUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegoogleads.go b/internal/sdk/pkg/models/shared/sourcegoogleads.go index 147c9445b..d0d401ed9 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogleads.go +++ b/internal/sdk/pkg/models/shared/sourcegoogleads.go @@ -59,11 +59,11 @@ type SourceGoogleAds struct { CustomQueries []SourceGoogleAdsCustomQueries `json:"custom_queries,omitempty"` // Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation. CustomerID string `json:"customer_id"` - // UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. + // UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set) EndDate *types.Date `json:"end_date,omitempty"` // If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. 
For more information about this field, refer to Google's documentation. LoginCustomerID *string `json:"login_customer_id,omitempty"` SourceType SourceGoogleAdsGoogleAds `json:"sourceType"` - // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. - StartDate types.Date `json:"start_date"` + // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. (Default value of two years ago is used if not set) + StartDate *types.Date `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go b/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go index 73a84ecf3..8da356740 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go +++ b/internal/sdk/pkg/models/shared/sourcegoogleadsupdate.go @@ -33,10 +33,10 @@ type SourceGoogleAdsUpdate struct { CustomQueries []SourceGoogleAdsUpdateCustomQueries `json:"custom_queries,omitempty"` // Comma-separated list of (client) customer IDs. Each customer ID must be specified as a 10-digit number without dashes. For detailed instructions on finding this value, refer to our documentation. CustomerID string `json:"customer_id"` - // UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. + // UTC date in the format YYYY-MM-DD. Any data after this date will not be replicated. (Default value of today is used if not set) EndDate *types.Date `json:"end_date,omitempty"` // If your access to the customer account is through a manager account, this field is required, and must be set to the 10-digit customer ID of the manager account. For more information about this field, refer to Google's documentation. LoginCustomerID *string `json:"login_customer_id,omitempty"` - // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. - StartDate types.Date `json:"start_date"` + // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
(Default value of two years ago is used if not set) + StartDate *types.Date `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go index 1d3e18a73..7ce86a4bd 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go +++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapi.go @@ -113,21 +113,21 @@ func CreateSourceGoogleAnalyticsDataAPICredentialsSourceGoogleAnalyticsDataAPICr func (u *SourceGoogleAnalyticsDataAPICredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth) + sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth); err == nil { - u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth - u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth + if err := d.Decode(&sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication + u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication return nil } - sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication) + sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication - u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth); err == nil { + u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth + u.Type = SourceGoogleAnalyticsDataAPICredentialsTypeSourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth return nil } @@ -135,14 +135,14 @@ func (u *SourceGoogleAnalyticsDataAPICredentials) UnmarshalJSON(data []byte) err } func (u SourceGoogleAnalyticsDataAPICredentials) MarshalJSON() ([]byte, error) { - if u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth != nil { - return json.Marshal(u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth) - } - if u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication != nil { return json.Marshal(u.SourceGoogleAnalyticsDataAPICredentialsServiceAccountKeyAuthentication) } + if u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth != nil { + return 
json.Marshal(u.SourceGoogleAnalyticsDataAPICredentialsAuthenticateViaGoogleOauth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go index c2d1278aa..9cbd8b504 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go +++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsdataapiupdate.go @@ -113,21 +113,21 @@ func CreateSourceGoogleAnalyticsDataAPIUpdateCredentialsSourceGoogleAnalyticsDat func (u *SourceGoogleAnalyticsDataAPIUpdateCredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth) + sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth); err == nil { - u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth - u.Type = SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth + if err := d.Decode(&sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication + u.Type = SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication return nil } - sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication) + sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication - u.Type = SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth); err == nil { + u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth + u.Type = SourceGoogleAnalyticsDataAPIUpdateCredentialsTypeSourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth return nil } @@ -135,14 +135,14 @@ func (u *SourceGoogleAnalyticsDataAPIUpdateCredentials) UnmarshalJSON(data []byt } func (u SourceGoogleAnalyticsDataAPIUpdateCredentials) MarshalJSON() ([]byte, error) { - if u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth != nil { - return json.Marshal(u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth) - } - if u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication != nil { return 
json.Marshal(u.SourceGoogleAnalyticsDataAPIUpdateCredentialsServiceAccountKeyAuthentication) } + if u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth != nil { + return json.Marshal(u.SourceGoogleAnalyticsDataAPIUpdateCredentialsAuthenticateViaGoogleOauth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go index 5c3a3ad46..7a376a55d 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go +++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4.go @@ -113,21 +113,21 @@ func CreateSourceGoogleAnalyticsV4CredentialsSourceGoogleAnalyticsV4CredentialsS func (u *SourceGoogleAnalyticsV4Credentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth) + sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth); err == nil { - u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth - u.Type = SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth + if err := d.Decode(&sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication + u.Type = SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication return nil } - sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication) + sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication - u.Type = SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth); err == nil { + u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth + u.Type = SourceGoogleAnalyticsV4CredentialsTypeSourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth return nil } @@ -135,14 +135,14 @@ func (u *SourceGoogleAnalyticsV4Credentials) UnmarshalJSON(data []byte) error { } func (u SourceGoogleAnalyticsV4Credentials) MarshalJSON() ([]byte, error) { - if u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth != nil { - return json.Marshal(u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth) - } - if u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication != nil { return json.Marshal(u.SourceGoogleAnalyticsV4CredentialsServiceAccountKeyAuthentication) } + if u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth != nil { + return 
json.Marshal(u.SourceGoogleAnalyticsV4CredentialsAuthenticateViaGoogleOauth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go index 87dff0f17..3889f22a0 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go +++ b/internal/sdk/pkg/models/shared/sourcegoogleanalyticsv4update.go @@ -113,21 +113,21 @@ func CreateSourceGoogleAnalyticsV4UpdateCredentialsSourceGoogleAnalyticsV4Update func (u *SourceGoogleAnalyticsV4UpdateCredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth) + sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth); err == nil { - u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth - u.Type = SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth + if err := d.Decode(&sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication + u.Type = SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication return nil } - sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication := new(SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication) + sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth := new(SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication = sourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication - u.Type = SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth); err == nil { + u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth = sourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth + u.Type = SourceGoogleAnalyticsV4UpdateCredentialsTypeSourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth return nil } @@ -135,14 +135,14 @@ func (u *SourceGoogleAnalyticsV4UpdateCredentials) UnmarshalJSON(data []byte) er } func (u SourceGoogleAnalyticsV4UpdateCredentials) MarshalJSON() ([]byte, error) { - if u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth != nil { - return json.Marshal(u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth) - } - if u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication != nil { return json.Marshal(u.SourceGoogleAnalyticsV4UpdateCredentialsServiceAccountKeyAuthentication) } + if u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth != nil { + return 
json.Marshal(u.SourceGoogleAnalyticsV4UpdateCredentialsAuthenticateViaGoogleOauth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegoogledirectory.go b/internal/sdk/pkg/models/shared/sourcegoogledirectory.go index e3082745c..d794413b5 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogledirectory.go +++ b/internal/sdk/pkg/models/shared/sourcegoogledirectory.go @@ -116,21 +116,21 @@ func CreateSourceGoogleDirectoryGoogleCredentialsSourceGoogleDirectoryGoogleCred func (u *SourceGoogleDirectoryGoogleCredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth := new(SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth) + sourceGoogleDirectoryGoogleCredentialsServiceAccountKey := new(SourceGoogleDirectoryGoogleCredentialsServiceAccountKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth); err == nil { - u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth = sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth - u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth + if err := d.Decode(&sourceGoogleDirectoryGoogleCredentialsServiceAccountKey); err == nil { + u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey = sourceGoogleDirectoryGoogleCredentialsServiceAccountKey + u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsServiceAccountKey return nil } - sourceGoogleDirectoryGoogleCredentialsServiceAccountKey := new(SourceGoogleDirectoryGoogleCredentialsServiceAccountKey) + sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth := new(SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleDirectoryGoogleCredentialsServiceAccountKey); err == nil { - u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey = sourceGoogleDirectoryGoogleCredentialsServiceAccountKey - u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsServiceAccountKey + if err := d.Decode(&sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth); err == nil { + u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth = sourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth + u.Type = SourceGoogleDirectoryGoogleCredentialsTypeSourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth return nil } @@ -138,14 +138,14 @@ func (u *SourceGoogleDirectoryGoogleCredentials) UnmarshalJSON(data []byte) erro } func (u SourceGoogleDirectoryGoogleCredentials) MarshalJSON() ([]byte, error) { - if u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth != nil { - return json.Marshal(u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth) - } - if u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey != nil { return json.Marshal(u.SourceGoogleDirectoryGoogleCredentialsServiceAccountKey) } + if u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth != nil { + return json.Marshal(u.SourceGoogleDirectoryGoogleCredentialsSignInViaGoogleOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go b/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go index 491bb82e9..cc2e16331 100755 --- a/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go +++ 
b/internal/sdk/pkg/models/shared/sourcegoogledirectoryupdate.go @@ -116,21 +116,21 @@ func CreateSourceGoogleDirectoryUpdateGoogleCredentialsSourceGoogleDirectoryUpda func (u *SourceGoogleDirectoryUpdateGoogleCredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth := new(SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth) + sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey := new(SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth); err == nil { - u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth = sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth - u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth + if err := d.Decode(&sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey); err == nil { + u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey = sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey + u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey return nil } - sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey := new(SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey) + sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth := new(SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey); err == nil { - u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey = sourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey - u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey + if err := d.Decode(&sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth); err == nil { + u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth = sourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth + u.Type = SourceGoogleDirectoryUpdateGoogleCredentialsTypeSourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth return nil } @@ -138,14 +138,14 @@ func (u *SourceGoogleDirectoryUpdateGoogleCredentials) UnmarshalJSON(data []byte } func (u SourceGoogleDirectoryUpdateGoogleCredentials) MarshalJSON() ([]byte, error) { - if u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth != nil { - return json.Marshal(u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth) - } - if u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey != nil { return json.Marshal(u.SourceGoogleDirectoryUpdateGoogleCredentialsServiceAccountKey) } + if u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth != nil { + return json.Marshal(u.SourceGoogleDirectoryUpdateGoogleCredentialsSignInViaGoogleOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go b/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go index 725537f0f..7c9e662f0 100755 --- a/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go +++ b/internal/sdk/pkg/models/shared/sourcegooglesearchconsole.go @@ -113,21 +113,21 @@ func CreateSourceGoogleSearchConsoleAuthenticationTypeSourceGoogleSearchConsoleA func (u 
*SourceGoogleSearchConsoleAuthenticationType) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleSearchConsoleAuthenticationTypeOAuth := new(SourceGoogleSearchConsoleAuthenticationTypeOAuth) + sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSearchConsoleAuthenticationTypeOAuth); err == nil { - u.SourceGoogleSearchConsoleAuthenticationTypeOAuth = sourceGoogleSearchConsoleAuthenticationTypeOAuth - u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeOAuth + if err := d.Decode(&sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication = sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication + u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication return nil } - sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication) + sourceGoogleSearchConsoleAuthenticationTypeOAuth := new(SourceGoogleSearchConsoleAuthenticationTypeOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication = sourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication - u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleSearchConsoleAuthenticationTypeOAuth); err == nil { + u.SourceGoogleSearchConsoleAuthenticationTypeOAuth = sourceGoogleSearchConsoleAuthenticationTypeOAuth + u.Type = SourceGoogleSearchConsoleAuthenticationTypeTypeSourceGoogleSearchConsoleAuthenticationTypeOAuth return nil } @@ -135,30 +135,74 @@ func (u *SourceGoogleSearchConsoleAuthenticationType) UnmarshalJSON(data []byte) } func (u SourceGoogleSearchConsoleAuthenticationType) MarshalJSON() ([]byte, error) { + if u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication != nil { + return json.Marshal(u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication) + } + if u.SourceGoogleSearchConsoleAuthenticationTypeOAuth != nil { return json.Marshal(u.SourceGoogleSearchConsoleAuthenticationTypeOAuth) } - if u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication != nil { - return json.Marshal(u.SourceGoogleSearchConsoleAuthenticationTypeServiceAccountKeyAuthentication) + return nil, nil +} + +// SourceGoogleSearchConsoleCustomReportConfigValidEnums - An enumeration of dimensions. 
+type SourceGoogleSearchConsoleCustomReportConfigValidEnums string + +const ( + SourceGoogleSearchConsoleCustomReportConfigValidEnumsCountry SourceGoogleSearchConsoleCustomReportConfigValidEnums = "country" + SourceGoogleSearchConsoleCustomReportConfigValidEnumsDate SourceGoogleSearchConsoleCustomReportConfigValidEnums = "date" + SourceGoogleSearchConsoleCustomReportConfigValidEnumsDevice SourceGoogleSearchConsoleCustomReportConfigValidEnums = "device" + SourceGoogleSearchConsoleCustomReportConfigValidEnumsPage SourceGoogleSearchConsoleCustomReportConfigValidEnums = "page" + SourceGoogleSearchConsoleCustomReportConfigValidEnumsQuery SourceGoogleSearchConsoleCustomReportConfigValidEnums = "query" +) + +func (e SourceGoogleSearchConsoleCustomReportConfigValidEnums) ToPointer() *SourceGoogleSearchConsoleCustomReportConfigValidEnums { + return &e +} + +func (e *SourceGoogleSearchConsoleCustomReportConfigValidEnums) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err } + switch v { + case "country": + fallthrough + case "date": + fallthrough + case "device": + fallthrough + case "page": + fallthrough + case "query": + *e = SourceGoogleSearchConsoleCustomReportConfigValidEnums(v) + return nil + default: + return fmt.Errorf("invalid value for SourceGoogleSearchConsoleCustomReportConfigValidEnums: %v", v) + } +} - return nil, nil +type SourceGoogleSearchConsoleCustomReportConfig struct { + // A list of dimensions (country, date, device, page, query) + Dimensions []SourceGoogleSearchConsoleCustomReportConfigValidEnums `json:"dimensions"` + // The name of the custom report, this name would be used as stream name + Name string `json:"name"` } -// SourceGoogleSearchConsoleDataState - If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. -type SourceGoogleSearchConsoleDataState string +// SourceGoogleSearchConsoleDataFreshness - If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. 
+type SourceGoogleSearchConsoleDataFreshness string const ( - SourceGoogleSearchConsoleDataStateFinal SourceGoogleSearchConsoleDataState = "final" - SourceGoogleSearchConsoleDataStateAll SourceGoogleSearchConsoleDataState = "all" + SourceGoogleSearchConsoleDataFreshnessFinal SourceGoogleSearchConsoleDataFreshness = "final" + SourceGoogleSearchConsoleDataFreshnessAll SourceGoogleSearchConsoleDataFreshness = "all" ) -func (e SourceGoogleSearchConsoleDataState) ToPointer() *SourceGoogleSearchConsoleDataState { +func (e SourceGoogleSearchConsoleDataFreshness) ToPointer() *SourceGoogleSearchConsoleDataFreshness { return &e } -func (e *SourceGoogleSearchConsoleDataState) UnmarshalJSON(data []byte) error { +func (e *SourceGoogleSearchConsoleDataFreshness) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err @@ -167,10 +211,10 @@ func (e *SourceGoogleSearchConsoleDataState) UnmarshalJSON(data []byte) error { case "final": fallthrough case "all": - *e = SourceGoogleSearchConsoleDataState(v) + *e = SourceGoogleSearchConsoleDataFreshness(v) return nil default: - return fmt.Errorf("invalid value for SourceGoogleSearchConsoleDataState: %v", v) + return fmt.Errorf("invalid value for SourceGoogleSearchConsoleDataFreshness: %v", v) } } @@ -200,15 +244,17 @@ func (e *SourceGoogleSearchConsoleGoogleSearchConsole) UnmarshalJSON(data []byte type SourceGoogleSearchConsole struct { Authorization SourceGoogleSearchConsoleAuthenticationType `json:"authorization"` - // A JSON array describing the custom reports you want to sync from Google Search Console. See the docs for more information about the exact format you can use to fill out this field. + // (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports. CustomReports *string `json:"custom_reports,omitempty"` - // If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. - DataState *SourceGoogleSearchConsoleDataState `json:"data_state,omitempty"` - // UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field. + // You can add your Custom Analytics report by creating one. + CustomReportsArray []SourceGoogleSearchConsoleCustomReportConfig `json:"custom_reports_array,omitempty"` + // If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. + DataState *SourceGoogleSearchConsoleDataFreshness `json:"data_state,omitempty"` + // UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. EndDate *types.Date `json:"end_date,omitempty"` - // The URLs of the website property attached to your GSC account. Read more here. + // The URLs of the website property attached to your GSC account. Learn more about properties here. 
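The new custom_reports_array field above replaces the free-form custom_reports JSON string with typed SourceGoogleSearchConsoleCustomReportConfig entries, and data_state now uses the renamed SourceGoogleSearchConsoleDataFreshness enum. A hedged sketch of how a caller might populate these fields; the import path is a placeholder inferred from the file locations in this patch, and the values are made up:

package main

// Placeholder import path; the real module path may differ.
import "github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"

func main() {
	cfg := shared.SourceGoogleSearchConsole{
		SiteUrls: []string{"https://example.com/"},
		// A typed custom report; its name doubles as the stream name.
		CustomReportsArray: []shared.SourceGoogleSearchConsoleCustomReportConfig{
			{
				Name: "devices_by_day",
				Dimensions: []shared.SourceGoogleSearchConsoleCustomReportConfigValidEnums{
					shared.SourceGoogleSearchConsoleCustomReportConfigValidEnumsDate,
					shared.SourceGoogleSearchConsoleCustomReportConfigValidEnumsDevice,
				},
			},
		},
		// "final" keeps only finalized data; "all" is discouraged with incremental sync.
		DataState: shared.SourceGoogleSearchConsoleDataFreshnessFinal.ToPointer(),
	}
	_ = cfg
}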
SiteUrls []string `json:"site_urls"` SourceType SourceGoogleSearchConsoleGoogleSearchConsole `json:"sourceType"` - // UTC date in the format 2017-01-25. Any data before this date will not be replicated. - StartDate types.Date `json:"start_date"` + // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. + StartDate *types.Date `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go b/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go index 112ff0dfb..cd5ebed57 100755 --- a/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go +++ b/internal/sdk/pkg/models/shared/sourcegooglesearchconsoleupdate.go @@ -113,21 +113,21 @@ func CreateSourceGoogleSearchConsoleUpdateAuthenticationTypeSourceGoogleSearchCo func (u *SourceGoogleSearchConsoleUpdateAuthenticationType) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth := new(SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth) + sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth); err == nil { - u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth = sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth - u.Type = SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth + if err := d.Decode(&sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication = sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication + u.Type = SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication return nil } - sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication := new(SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication) + sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth := new(SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication = sourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication - u.Type = SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth); err == nil { + u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth = sourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth + u.Type = SourceGoogleSearchConsoleUpdateAuthenticationTypeTypeSourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth return nil } @@ -135,30 +135,74 @@ func (u *SourceGoogleSearchConsoleUpdateAuthenticationType) UnmarshalJSON(data [ } func (u SourceGoogleSearchConsoleUpdateAuthenticationType) MarshalJSON() ([]byte, error) { + if u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication != nil { + return 
json.Marshal(u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication) + } + if u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth != nil { return json.Marshal(u.SourceGoogleSearchConsoleUpdateAuthenticationTypeOAuth) } - if u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication != nil { - return json.Marshal(u.SourceGoogleSearchConsoleUpdateAuthenticationTypeServiceAccountKeyAuthentication) + return nil, nil +} + +// SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums - An enumeration of dimensions. +type SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums string + +const ( + SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsCountry SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "country" + SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsDate SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "date" + SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsDevice SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "device" + SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsPage SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "page" + SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnumsQuery SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums = "query" +) + +func (e SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums) ToPointer() *SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums { + return &e +} + +func (e *SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err } + switch v { + case "country": + fallthrough + case "date": + fallthrough + case "device": + fallthrough + case "page": + fallthrough + case "query": + *e = SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums(v) + return nil + default: + return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums: %v", v) + } +} - return nil, nil +type SourceGoogleSearchConsoleUpdateCustomReportConfig struct { + // A list of dimensions (country, date, device, page, query) + Dimensions []SourceGoogleSearchConsoleUpdateCustomReportConfigValidEnums `json:"dimensions"` + // The name of the custom report, this name would be used as stream name + Name string `json:"name"` } -// SourceGoogleSearchConsoleUpdateDataState - If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. -type SourceGoogleSearchConsoleUpdateDataState string +// SourceGoogleSearchConsoleUpdateDataFreshness - If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. 
+type SourceGoogleSearchConsoleUpdateDataFreshness string const ( - SourceGoogleSearchConsoleUpdateDataStateFinal SourceGoogleSearchConsoleUpdateDataState = "final" - SourceGoogleSearchConsoleUpdateDataStateAll SourceGoogleSearchConsoleUpdateDataState = "all" + SourceGoogleSearchConsoleUpdateDataFreshnessFinal SourceGoogleSearchConsoleUpdateDataFreshness = "final" + SourceGoogleSearchConsoleUpdateDataFreshnessAll SourceGoogleSearchConsoleUpdateDataFreshness = "all" ) -func (e SourceGoogleSearchConsoleUpdateDataState) ToPointer() *SourceGoogleSearchConsoleUpdateDataState { +func (e SourceGoogleSearchConsoleUpdateDataFreshness) ToPointer() *SourceGoogleSearchConsoleUpdateDataFreshness { return &e } -func (e *SourceGoogleSearchConsoleUpdateDataState) UnmarshalJSON(data []byte) error { +func (e *SourceGoogleSearchConsoleUpdateDataFreshness) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err @@ -167,23 +211,25 @@ func (e *SourceGoogleSearchConsoleUpdateDataState) UnmarshalJSON(data []byte) er case "final": fallthrough case "all": - *e = SourceGoogleSearchConsoleUpdateDataState(v) + *e = SourceGoogleSearchConsoleUpdateDataFreshness(v) return nil default: - return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateDataState: %v", v) + return fmt.Errorf("invalid value for SourceGoogleSearchConsoleUpdateDataFreshness: %v", v) } } type SourceGoogleSearchConsoleUpdate struct { Authorization SourceGoogleSearchConsoleUpdateAuthenticationType `json:"authorization"` - // A JSON array describing the custom reports you want to sync from Google Search Console. See the docs for more information about the exact format you can use to fill out this field. + // (DEPRCATED) A JSON array describing the custom reports you want to sync from Google Search Console. See our documentation for more information on formulating custom reports. CustomReports *string `json:"custom_reports,omitempty"` - // If "final" or if this parameter is omitted, the returned data will include only finalized data. Setting this parameter to "all" should not be used with Incremental Sync mode as it may cause data loss. If "all", data will include fresh data. - DataState *SourceGoogleSearchConsoleUpdateDataState `json:"data_state,omitempty"` - // UTC date in the format 2017-01-25. Any data after this date will not be replicated. Must be greater or equal to the start date field. + // You can add your Custom Analytics report by creating one. + CustomReportsArray []SourceGoogleSearchConsoleUpdateCustomReportConfig `json:"custom_reports_array,omitempty"` + // If set to 'final', the returned data will include only finalized, stable data. If set to 'all', fresh data will be included. When using Incremental sync mode, we do not recommend setting this parameter to 'all' as it may cause data loss. More information can be found in our full documentation. + DataState *SourceGoogleSearchConsoleUpdateDataFreshness `json:"data_state,omitempty"` + // UTC date in the format YYYY-MM-DD. Any data created after this date will not be replicated. Must be greater or equal to the start date field. Leaving this field blank will replicate all data from the start date onward. EndDate *types.Date `json:"end_date,omitempty"` - // The URLs of the website property attached to your GSC account. Read more here. + // The URLs of the website property attached to your GSC account. Learn more about properties here. SiteUrls []string `json:"site_urls"` - // UTC date in the format 2017-01-25. 
Any data before this date will not be replicated. - StartDate types.Date `json:"start_date"` + // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. + StartDate *types.Date `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcegooglesheets.go b/internal/sdk/pkg/models/shared/sourcegooglesheets.go index a1f786b2f..b2b5941e8 100755 --- a/internal/sdk/pkg/models/shared/sourcegooglesheets.go +++ b/internal/sdk/pkg/models/shared/sourcegooglesheets.go @@ -110,21 +110,21 @@ func CreateSourceGoogleSheetsAuthenticationSourceGoogleSheetsAuthenticationServi func (u *SourceGoogleSheetsAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth) + sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication := new(SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth); err == nil { - u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth = sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth - u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth + if err := d.Decode(&sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication = sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication + u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication return nil } - sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication := new(SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication) + sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication = sourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication - u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth); err == nil { + u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth = sourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth + u.Type = SourceGoogleSheetsAuthenticationTypeSourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth return nil } @@ -132,14 +132,14 @@ func (u *SourceGoogleSheetsAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceGoogleSheetsAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth != nil { - return json.Marshal(u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth) - } - if u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication != nil { return json.Marshal(u.SourceGoogleSheetsAuthenticationServiceAccountKeyAuthentication) } + if u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth != nil { + return json.Marshal(u.SourceGoogleSheetsAuthenticationAuthenticateViaGoogleOAuth) + } + return nil, nil } @@ -171,10 +171,8 @@ type SourceGoogleSheets struct { // Credentials for connecting to the Google Sheets API 
Credentials SourceGoogleSheetsAuthentication `json:"credentials"` // Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based. - NamesConversion *bool `json:"names_conversion,omitempty"` - // The number of rows fetched when making a Google Sheet API call. Defaults to 200. - RowBatchSize *int64 `json:"row_batch_size,omitempty"` - SourceType SourceGoogleSheetsGoogleSheets `json:"sourceType"` + NamesConversion *bool `json:"names_conversion,omitempty"` + SourceType SourceGoogleSheetsGoogleSheets `json:"sourceType"` // Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'. SpreadsheetID string `json:"spreadsheet_id"` } diff --git a/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go b/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go index d431bb4d3..88fb73b21 100755 --- a/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go +++ b/internal/sdk/pkg/models/shared/sourcegooglesheetsupdate.go @@ -110,21 +110,21 @@ func CreateSourceGoogleSheetsUpdateAuthenticationSourceGoogleSheetsUpdateAuthent func (u *SourceGoogleSheetsUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth) + sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication := new(SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth); err == nil { - u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth = sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth - u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth + if err := d.Decode(&sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication); err == nil { + u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication = sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication + u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication return nil } - sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication := new(SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication) + sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth := new(SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication); err == nil { - u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication = sourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication - u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication + if err := d.Decode(&sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth); err == nil { + u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth = sourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth + u.Type = SourceGoogleSheetsUpdateAuthenticationTypeSourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth return 
nil } @@ -132,14 +132,14 @@ func (u *SourceGoogleSheetsUpdateAuthentication) UnmarshalJSON(data []byte) erro } func (u SourceGoogleSheetsUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth != nil { - return json.Marshal(u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth) - } - if u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication != nil { return json.Marshal(u.SourceGoogleSheetsUpdateAuthenticationServiceAccountKeyAuthentication) } + if u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth != nil { + return json.Marshal(u.SourceGoogleSheetsUpdateAuthenticationAuthenticateViaGoogleOAuth) + } + return nil, nil } @@ -148,8 +148,6 @@ type SourceGoogleSheetsUpdate struct { Credentials SourceGoogleSheetsUpdateAuthentication `json:"credentials"` // Enables the conversion of column names to a standardized, SQL-compliant format. For example, 'My Name' -> 'my_name'. Enable this option if your destination is SQL-based. NamesConversion *bool `json:"names_conversion,omitempty"` - // The number of rows fetched when making a Google Sheet API call. Defaults to 200. - RowBatchSize *int64 `json:"row_batch_size,omitempty"` // Enter the link to the Google spreadsheet you want to sync. To copy the link, click the 'Share' button in the top-right corner of the spreadsheet, then click 'Copy link'. SpreadsheetID string `json:"spreadsheet_id"` } diff --git a/internal/sdk/pkg/models/shared/sourceharvest.go b/internal/sdk/pkg/models/shared/sourceharvest.go index 4dc48afb1..81c717acf 100755 --- a/internal/sdk/pkg/models/shared/sourceharvest.go +++ b/internal/sdk/pkg/models/shared/sourceharvest.go @@ -207,21 +207,21 @@ func CreateSourceHarvestAuthenticationMechanismSourceHarvestAuthenticationMechan func (u *SourceHarvestAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth := new(SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) + sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth); err == nil { - u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth = sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth - u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth + if err := d.Decode(&sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { + u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken + u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken return nil } - sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken) + sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth := new(SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { - 
u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken - u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken + if err := d.Decode(&sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth); err == nil { + u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth = sourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth + u.Type = SourceHarvestAuthenticationMechanismTypeSourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth return nil } @@ -229,14 +229,14 @@ func (u *SourceHarvestAuthenticationMechanism) UnmarshalJSON(data []byte) error } func (u SourceHarvestAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth != nil { - return json.Marshal(u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) - } - if u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil { return json.Marshal(u.SourceHarvestAuthenticationMechanismAuthenticateWithPersonalAccessToken) } + if u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth != nil { + return json.Marshal(u.SourceHarvestAuthenticationMechanismAuthenticateViaHarvestOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceharvestupdate.go b/internal/sdk/pkg/models/shared/sourceharvestupdate.go index a4083467f..bc5a19c06 100755 --- a/internal/sdk/pkg/models/shared/sourceharvestupdate.go +++ b/internal/sdk/pkg/models/shared/sourceharvestupdate.go @@ -207,21 +207,21 @@ func CreateSourceHarvestUpdateAuthenticationMechanismSourceHarvestUpdateAuthenti func (u *SourceHarvestUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth := new(SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) + sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth); err == nil { - u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth = sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth - u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth + if err := d.Decode(&sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { + u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken + u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken return nil } - sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken := new(SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) + sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth := new(SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken); err == nil { - 
u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken = sourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken - u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken + if err := d.Decode(&sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth); err == nil { + u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth = sourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth + u.Type = SourceHarvestUpdateAuthenticationMechanismTypeSourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth return nil } @@ -229,14 +229,14 @@ func (u *SourceHarvestUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) } func (u SourceHarvestUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth != nil { - return json.Marshal(u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) - } - if u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken != nil { return json.Marshal(u.SourceHarvestUpdateAuthenticationMechanismAuthenticateWithPersonalAccessToken) } + if u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth != nil { + return json.Marshal(u.SourceHarvestUpdateAuthenticationMechanismAuthenticateViaHarvestOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcehubspot.go b/internal/sdk/pkg/models/shared/sourcehubspot.go index 121dd21c9..edd2dffda 100755 --- a/internal/sdk/pkg/models/shared/sourcehubspot.go +++ b/internal/sdk/pkg/models/shared/sourcehubspot.go @@ -115,21 +115,21 @@ func CreateSourceHubspotAuthenticationSourceHubspotAuthenticationPrivateApp(sour func (u *SourceHubspotAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceHubspotAuthenticationOAuth := new(SourceHubspotAuthenticationOAuth) + sourceHubspotAuthenticationPrivateApp := new(SourceHubspotAuthenticationPrivateApp) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHubspotAuthenticationOAuth); err == nil { - u.SourceHubspotAuthenticationOAuth = sourceHubspotAuthenticationOAuth - u.Type = SourceHubspotAuthenticationTypeSourceHubspotAuthenticationOAuth + if err := d.Decode(&sourceHubspotAuthenticationPrivateApp); err == nil { + u.SourceHubspotAuthenticationPrivateApp = sourceHubspotAuthenticationPrivateApp + u.Type = SourceHubspotAuthenticationTypeSourceHubspotAuthenticationPrivateApp return nil } - sourceHubspotAuthenticationPrivateApp := new(SourceHubspotAuthenticationPrivateApp) + sourceHubspotAuthenticationOAuth := new(SourceHubspotAuthenticationOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHubspotAuthenticationPrivateApp); err == nil { - u.SourceHubspotAuthenticationPrivateApp = sourceHubspotAuthenticationPrivateApp - u.Type = SourceHubspotAuthenticationTypeSourceHubspotAuthenticationPrivateApp + if err := d.Decode(&sourceHubspotAuthenticationOAuth); err == nil { + u.SourceHubspotAuthenticationOAuth = sourceHubspotAuthenticationOAuth + u.Type = SourceHubspotAuthenticationTypeSourceHubspotAuthenticationOAuth return nil } @@ -137,14 +137,14 @@ func (u *SourceHubspotAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceHubspotAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceHubspotAuthenticationOAuth != nil { - return 
json.Marshal(u.SourceHubspotAuthenticationOAuth) - } - if u.SourceHubspotAuthenticationPrivateApp != nil { return json.Marshal(u.SourceHubspotAuthenticationPrivateApp) } + if u.SourceHubspotAuthenticationOAuth != nil { + return json.Marshal(u.SourceHubspotAuthenticationOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcehubspotupdate.go b/internal/sdk/pkg/models/shared/sourcehubspotupdate.go index 454b4ffe2..b754f54f8 100755 --- a/internal/sdk/pkg/models/shared/sourcehubspotupdate.go +++ b/internal/sdk/pkg/models/shared/sourcehubspotupdate.go @@ -115,21 +115,21 @@ func CreateSourceHubspotUpdateAuthenticationSourceHubspotUpdateAuthenticationPri func (u *SourceHubspotUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceHubspotUpdateAuthenticationOAuth := new(SourceHubspotUpdateAuthenticationOAuth) + sourceHubspotUpdateAuthenticationPrivateApp := new(SourceHubspotUpdateAuthenticationPrivateApp) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHubspotUpdateAuthenticationOAuth); err == nil { - u.SourceHubspotUpdateAuthenticationOAuth = sourceHubspotUpdateAuthenticationOAuth - u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationOAuth + if err := d.Decode(&sourceHubspotUpdateAuthenticationPrivateApp); err == nil { + u.SourceHubspotUpdateAuthenticationPrivateApp = sourceHubspotUpdateAuthenticationPrivateApp + u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationPrivateApp return nil } - sourceHubspotUpdateAuthenticationPrivateApp := new(SourceHubspotUpdateAuthenticationPrivateApp) + sourceHubspotUpdateAuthenticationOAuth := new(SourceHubspotUpdateAuthenticationOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceHubspotUpdateAuthenticationPrivateApp); err == nil { - u.SourceHubspotUpdateAuthenticationPrivateApp = sourceHubspotUpdateAuthenticationPrivateApp - u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationPrivateApp + if err := d.Decode(&sourceHubspotUpdateAuthenticationOAuth); err == nil { + u.SourceHubspotUpdateAuthenticationOAuth = sourceHubspotUpdateAuthenticationOAuth + u.Type = SourceHubspotUpdateAuthenticationTypeSourceHubspotUpdateAuthenticationOAuth return nil } @@ -137,14 +137,14 @@ func (u *SourceHubspotUpdateAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceHubspotUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceHubspotUpdateAuthenticationOAuth != nil { - return json.Marshal(u.SourceHubspotUpdateAuthenticationOAuth) - } - if u.SourceHubspotUpdateAuthenticationPrivateApp != nil { return json.Marshal(u.SourceHubspotUpdateAuthenticationPrivateApp) } + if u.SourceHubspotUpdateAuthenticationOAuth != nil { + return json.Marshal(u.SourceHubspotUpdateAuthenticationOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceinsightly.go b/internal/sdk/pkg/models/shared/sourceinsightly.go index 1d6f574f8..2f01f463f 100755 --- a/internal/sdk/pkg/models/shared/sourceinsightly.go +++ b/internal/sdk/pkg/models/shared/sourceinsightly.go @@ -34,7 +34,7 @@ func (e *SourceInsightlyInsightly) UnmarshalJSON(data []byte) error { type SourceInsightly struct { SourceType SourceInsightlyInsightly `json:"sourceType"` // The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. 
Note that it will be used only for incremental streams. - StartDate string `json:"start_date"` + StartDate *string `json:"start_date"` // Your Insightly API token. - Token string `json:"token"` + Token *string `json:"token"` } diff --git a/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go b/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go index 42140249d..57e3a7dfb 100755 --- a/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go +++ b/internal/sdk/pkg/models/shared/sourceinsightlyupdate.go @@ -4,7 +4,7 @@ package shared type SourceInsightlyUpdate struct { // The date from which you'd like to replicate data for Insightly in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. Note that it will be used only for incremental streams. - StartDate string `json:"start_date"` + StartDate *string `json:"start_date"` // Your Insightly API token. - Token string `json:"token"` + Token *string `json:"token"` } diff --git a/internal/sdk/pkg/models/shared/sourceintercom.go b/internal/sdk/pkg/models/shared/sourceintercom.go index 17cb0ae54..375394f1b 100755 --- a/internal/sdk/pkg/models/shared/sourceintercom.go +++ b/internal/sdk/pkg/models/shared/sourceintercom.go @@ -34,8 +34,12 @@ func (e *SourceIntercomIntercom) UnmarshalJSON(data []byte) error { type SourceIntercom struct { // Access token for making authenticated requests. See the Intercom docs for more information. - AccessToken string `json:"access_token"` - SourceType SourceIntercomIntercom `json:"sourceType"` + AccessToken string `json:"access_token"` + // Client Id for your Intercom application. + ClientID *string `json:"client_id,omitempty"` + // Client Secret for your Intercom application. + ClientSecret *string `json:"client_secret,omitempty"` + SourceType SourceIntercomIntercom `json:"sourceType"` // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. StartDate time.Time `json:"start_date"` } diff --git a/internal/sdk/pkg/models/shared/sourceintercomupdate.go b/internal/sdk/pkg/models/shared/sourceintercomupdate.go index 80e2ef4a2..f9fe5214e 100755 --- a/internal/sdk/pkg/models/shared/sourceintercomupdate.go +++ b/internal/sdk/pkg/models/shared/sourceintercomupdate.go @@ -9,6 +9,10 @@ import ( type SourceIntercomUpdate struct { // Access token for making authenticated requests. See the Intercom docs for more information. AccessToken string `json:"access_token"` + // Client Id for your Intercom application. + ClientID *string `json:"client_id,omitempty"` + // Client Secret for your Intercom application. + ClientSecret *string `json:"client_secret,omitempty"` // UTC date and time in the format 2017-01-25T00:00:00Z. Any data before this date will not be replicated. StartDate time.Time `json:"start_date"` } diff --git a/internal/sdk/pkg/models/shared/sourcelemlist.go b/internal/sdk/pkg/models/shared/sourcelemlist.go index c260b8ae1..1b132a68e 100755 --- a/internal/sdk/pkg/models/shared/sourcelemlist.go +++ b/internal/sdk/pkg/models/shared/sourcelemlist.go @@ -32,7 +32,7 @@ func (e *SourceLemlistLemlist) UnmarshalJSON(data []byte) error { } type SourceLemlist struct { - // Lemlist API key. 
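Besides the Insightly start_date and token becoming optional pointers, the Intercom source above gains optional client_id and client_secret fields alongside the existing access token. A small hedged sketch of supplying them; the import path and values are placeholders:

package main

import (
	"time"

	// Placeholder import path; the real module path may differ.
	"github.com/airbytehq/terraform-provider-airbyte/internal/sdk/pkg/models/shared"
)

func strPtr(s string) *string { return &s }

func main() {
	src := shared.SourceIntercom{
		AccessToken: "dummy-access-token",
		// New optional fields; omitted from the payload when left nil.
		ClientID:     strPtr("dummy-client-id"),
		ClientSecret: strPtr("dummy-client-secret"),
		StartDate:    time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
	}
	_ = src
}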
+ // Lemlist API key, APIKey string `json:"api_key"` SourceType SourceLemlistLemlist `json:"sourceType"` } diff --git a/internal/sdk/pkg/models/shared/sourcelemlistupdate.go b/internal/sdk/pkg/models/shared/sourcelemlistupdate.go index a8c6a6096..8781739b8 100755 --- a/internal/sdk/pkg/models/shared/sourcelemlistupdate.go +++ b/internal/sdk/pkg/models/shared/sourcelemlistupdate.go @@ -3,6 +3,6 @@ package shared type SourceLemlistUpdate struct { - // Lemlist API key. + // Lemlist API key, APIKey string `json:"api_key"` } diff --git a/internal/sdk/pkg/models/shared/sourceleverhiring.go b/internal/sdk/pkg/models/shared/sourceleverhiring.go index e7ec42f08..7cb461607 100755 --- a/internal/sdk/pkg/models/shared/sourceleverhiring.go +++ b/internal/sdk/pkg/models/shared/sourceleverhiring.go @@ -110,21 +110,21 @@ func CreateSourceLeverHiringAuthenticationMechanismSourceLeverHiringAuthenticati func (u *SourceLeverHiringAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth := new(SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth) + sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey := new(SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth); err == nil { - u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth = sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth - u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth + if err := d.Decode(&sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey); err == nil { + u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey = sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey + u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey return nil } - sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey := new(SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey) + sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth := new(SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey); err == nil { - u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey = sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey - u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey + if err := d.Decode(&sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth); err == nil { + u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth = sourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth + u.Type = SourceLeverHiringAuthenticationMechanismTypeSourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth return nil } @@ -132,14 +132,14 @@ func (u *SourceLeverHiringAuthenticationMechanism) UnmarshalJSON(data []byte) er } func (u SourceLeverHiringAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth != nil { - return 
json.Marshal(u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth) - } - if u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey != nil { return json.Marshal(u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverAPIKey) } + if u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth != nil { + return json.Marshal(u.SourceLeverHiringAuthenticationMechanismAuthenticateViaLeverOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go b/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go index ed6fef557..2e93832b9 100755 --- a/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go +++ b/internal/sdk/pkg/models/shared/sourceleverhiringupdate.go @@ -110,21 +110,21 @@ func CreateSourceLeverHiringUpdateAuthenticationMechanismSourceLeverHiringUpdate func (u *SourceLeverHiringUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth := new(SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth) + sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey := new(SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth); err == nil { - u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth = sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth - u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth + if err := d.Decode(&sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey); err == nil { + u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey = sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey + u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey return nil } - sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey := new(SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey) + sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth := new(SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey); err == nil { - u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey = sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey - u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey + if err := d.Decode(&sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth); err == nil { + u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth = sourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth + u.Type = SourceLeverHiringUpdateAuthenticationMechanismTypeSourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth return nil } @@ -132,14 +132,14 @@ func (u *SourceLeverHiringUpdateAuthenticationMechanism) UnmarshalJSON(data []by } func (u SourceLeverHiringUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if 
u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth != nil { - return json.Marshal(u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth) - } - if u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey != nil { return json.Marshal(u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverAPIKey) } + if u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth != nil { + return json.Marshal(u.SourceLeverHiringUpdateAuthenticationMechanismAuthenticateViaLeverOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinads.go b/internal/sdk/pkg/models/shared/sourcelinkedinads.go index 4b2220701..a397e7309 100755 --- a/internal/sdk/pkg/models/shared/sourcelinkedinads.go +++ b/internal/sdk/pkg/models/shared/sourcelinkedinads.go @@ -10,38 +10,38 @@ import ( "fmt" ) -// SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy - Select value from list to pivot by -type SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy string +// SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory - Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. +type SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory string const ( - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByCompany SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "COMPANY" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByAccount SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "ACCOUNT" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByShare SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "SHARE" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByCampaign SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CAMPAIGN" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByCreative SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CREATIVE" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByCampaignGroup SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CAMPAIGN_GROUP" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByConversion SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CONVERSION" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByConversationNode SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CONVERSATION_NODE" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByConversationNodeOptionIndex SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CONVERSATION_NODE_OPTION_INDEX" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByServingLocation SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "SERVING_LOCATION" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByCardIndex SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "CARD_INDEX" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberCompanySize SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_COMPANY_SIZE" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberIndustry SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_INDUSTRY" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberSeniority SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_SENIORITY" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberJobTitle SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_JOB_TITLE " - 
SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberJobFunction SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_JOB_FUNCTION " - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberCountryV2 SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_COUNTRY_V2 " - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberRegionV2 SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_REGION_V2" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByMemberCompany SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "MEMBER_COMPANY" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByPlacementName SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "PLACEMENT_NAME" - SourceLinkedinAdsAdAnalyticsReportConfigurationPivotByImpressionDeviceType SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy = "IMPRESSION_DEVICE_TYPE" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCompany SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "COMPANY" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryAccount SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "ACCOUNT" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryShare SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "SHARE" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCampaign SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCreative SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CREATIVE" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCampaignGroup SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN_GROUP" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryConversion SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CONVERSION" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryConversationNode SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryConversationNodeOptionIndex SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE_OPTION_INDEX" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryServingLocation SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "SERVING_LOCATION" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryCardIndex SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "CARD_INDEX" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberCompanySize SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY_SIZE" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberIndustry SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_INDUSTRY" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberSeniority SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_SENIORITY" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberJobTitle SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_TITLE " + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberJobFunction SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_FUNCTION " + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberCountryV2 SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COUNTRY_V2 " + 
SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberRegionV2 SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_REGION_V2" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryMemberCompany SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryPlacementName SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "PLACEMENT_NAME" + SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategoryImpressionDeviceType SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory = "IMPRESSION_DEVICE_TYPE" ) -func (e SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy) ToPointer() *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy { +func (e SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory) ToPointer() *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory { return &e } -func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy) UnmarshalJSON(data []byte) error { +func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err @@ -88,18 +88,14 @@ func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy) UnmarshalJSON(d case "PLACEMENT_NAME": fallthrough case "IMPRESSION_DEVICE_TYPE": - *e = SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy(v) + *e = SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory(v) return nil default: - return fmt.Errorf("invalid value for SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy: %v", v) + return fmt.Errorf("invalid value for SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory: %v", v) } } -// SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity - Set time granularity for report: -// ALL - Results grouped into a single result across the entire time range of the report. -// DAILY - Results grouped by day. -// MONTHLY - Results grouped by month. -// YEARLY - Results grouped by year. +// SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity - Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. type SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity string const ( @@ -135,15 +131,11 @@ func (e *SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity) Unmarsh // SourceLinkedinAdsAdAnalyticsReportConfiguration - Config for custom ad Analytics Report type SourceLinkedinAdsAdAnalyticsReportConfiguration struct { - // The name for the report + // The name for the custom report. Name string `json:"name"` - // Select value from list to pivot by - PivotBy SourceLinkedinAdsAdAnalyticsReportConfigurationPivotBy `json:"pivot_by"` - // Set time granularity for report: - // ALL - Results grouped into a single result across the entire time range of the report. - // DAILY - Results grouped by day. - // MONTHLY - Results grouped by month. - // YEARLY - Results grouped by year. + // Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. + PivotBy SourceLinkedinAdsAdAnalyticsReportConfigurationPivotCategory `json:"pivot_by"` + // Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. TimeGranularity SourceLinkedinAdsAdAnalyticsReportConfigurationTimeGranularity `json:"time_granularity"` } @@ -172,7 +164,7 @@ func (e *SourceLinkedinAdsAuthenticationAccessTokenAuthMethod) UnmarshalJSON(dat } type SourceLinkedinAdsAuthenticationAccessToken struct { - // The token value generated using the authentication code. See the docs to obtain yours. + // The access token generated for your developer application. Refer to our documentation for more information. AccessToken string `json:"access_token"` AuthMethod *SourceLinkedinAdsAuthenticationAccessTokenAuthMethod `json:"auth_method,omitempty"` } @@ -203,11 +195,11 @@ func (e *SourceLinkedinAdsAuthenticationOAuth20AuthMethod) UnmarshalJSON(data [] type SourceLinkedinAdsAuthenticationOAuth20 struct { AuthMethod *SourceLinkedinAdsAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"` - // The client ID of the LinkedIn Ads developer application. + // The client ID of your developer application. Refer to our documentation for more information. ClientID string `json:"client_id"` - // The client secret the LinkedIn Ads developer application. + // The client secret of your developer application. Refer to our documentation for more information. ClientSecret string `json:"client_secret"` - // The key to refresh the expired access token. + // The key to refresh the expired access token. Refer to our documentation for more information. RefreshToken string `json:"refresh_token"` } @@ -215,7 +207,7 @@ type SourceLinkedinAdsAuthenticationType string const ( SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationOAuth20 SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_Authentication_OAuth2.0" - SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_Authentication_Access token" + SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken SourceLinkedinAdsAuthenticationType = "source-linkedin-ads_Authentication_Access Token" ) type SourceLinkedinAdsAuthentication struct { @@ -246,21 +238,21 @@ func CreateSourceLinkedinAdsAuthenticationSourceLinkedinAdsAuthenticationAccessT func (u *SourceLinkedinAdsAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceLinkedinAdsAuthenticationOAuth20 := new(SourceLinkedinAdsAuthenticationOAuth20) + sourceLinkedinAdsAuthenticationAccessToken := new(SourceLinkedinAdsAuthenticationAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinAdsAuthenticationOAuth20); err == nil { - u.SourceLinkedinAdsAuthenticationOAuth20 = sourceLinkedinAdsAuthenticationOAuth20 - u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationOAuth20 + if err := d.Decode(&sourceLinkedinAdsAuthenticationAccessToken); err == nil { + u.SourceLinkedinAdsAuthenticationAccessToken = sourceLinkedinAdsAuthenticationAccessToken + u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken return nil } - sourceLinkedinAdsAuthenticationAccessToken := new(SourceLinkedinAdsAuthenticationAccessToken) + sourceLinkedinAdsAuthenticationOAuth20 := new(SourceLinkedinAdsAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinAdsAuthenticationAccessToken); err == nil { - u.SourceLinkedinAdsAuthenticationAccessToken = 
sourceLinkedinAdsAuthenticationAccessToken - u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationAccessToken + if err := d.Decode(&sourceLinkedinAdsAuthenticationOAuth20); err == nil { + u.SourceLinkedinAdsAuthenticationOAuth20 = sourceLinkedinAdsAuthenticationOAuth20 + u.Type = SourceLinkedinAdsAuthenticationTypeSourceLinkedinAdsAuthenticationOAuth20 return nil } @@ -268,14 +260,14 @@ func (u *SourceLinkedinAdsAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceLinkedinAdsAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceLinkedinAdsAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceLinkedinAdsAuthenticationOAuth20) - } - if u.SourceLinkedinAdsAuthenticationAccessToken != nil { return json.Marshal(u.SourceLinkedinAdsAuthenticationAccessToken) } + if u.SourceLinkedinAdsAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceLinkedinAdsAuthenticationOAuth20) + } + return nil, nil } @@ -304,11 +296,11 @@ func (e *SourceLinkedinAdsLinkedinAds) UnmarshalJSON(data []byte) error { } type SourceLinkedinAds struct { - // Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info. + // Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs. AccountIds []int64 `json:"account_ids,omitempty"` AdAnalyticsReports []SourceLinkedinAdsAdAnalyticsReportConfiguration `json:"ad_analytics_reports,omitempty"` Credentials *SourceLinkedinAdsAuthentication `json:"credentials,omitempty"` SourceType SourceLinkedinAdsLinkedinAds `json:"sourceType"` - // UTC date in the format 2020-09-17. Any data before this date will not be replicated. + // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. StartDate types.Date `json:"start_date"` } diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go b/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go index 723509bc4..27c2d63fc 100755 --- a/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go +++ b/internal/sdk/pkg/models/shared/sourcelinkedinadsupdate.go @@ -10,38 +10,38 @@ import ( "fmt" ) -// SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy - Select value from list to pivot by -type SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy string +// SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory - Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. 
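The reordered UnmarshalJSON bodies above all use the same shape: each candidate variant is decoded with DisallowUnknownFields, and the first attempt that succeeds wins, so the order of attempts decides how an ambiguous payload (for example one carrying only auth_method) is classified. A minimal standalone sketch of that shape, using hypothetical AccessTokenAuth and OAuthAuth types rather than the generated SDK structs:

package main

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
)

// Hypothetical variants standing in for the generated union members.
type AccessTokenAuth struct {
	AccessToken string  `json:"access_token"`
	AuthMethod  *string `json:"auth_method,omitempty"`
}

type OAuthAuth struct {
	ClientID     string  `json:"client_id"`
	ClientSecret string  `json:"client_secret"`
	RefreshToken string  `json:"refresh_token"`
	AuthMethod   *string `json:"auth_method,omitempty"`
}

// Auth mirrors the generated union wrapper: at most one variant is non-nil.
type Auth struct {
	AccessTokenAuth *AccessTokenAuth
	OAuthAuth       *OAuthAuth
}

func (u *Auth) UnmarshalJSON(data []byte) error {
	// Try the access-token variant first; DisallowUnknownFields makes this
	// attempt fail for payloads that carry OAuth-only keys such as client_id.
	at := new(AccessTokenAuth)
	d := json.NewDecoder(bytes.NewReader(data))
	d.DisallowUnknownFields()
	if err := d.Decode(at); err == nil {
		u.AccessTokenAuth = at
		return nil
	}

	// Fall back to the OAuth 2.0 variant.
	oa := new(OAuthAuth)
	d = json.NewDecoder(bytes.NewReader(data))
	d.DisallowUnknownFields()
	if err := d.Decode(oa); err == nil {
		u.OAuthAuth = oa
		return nil
	}

	return errors.New("could not unmarshal into supported union types")
}

func main() {
	var a Auth
	_ = json.Unmarshal([]byte(`{"access_token":"abc"}`), &a)
	fmt.Println(a.AccessTokenAuth != nil, a.OAuthAuth != nil) // true false

	var b Auth
	_ = json.Unmarshal([]byte(`{"auth_method":"oAuth2.0"}`), &b)
	// Classified as the access-token variant purely because it is tried first.
	fmt.Println(b.AccessTokenAuth != nil) // true
}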
+type SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory string const ( - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByCompany SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "COMPANY" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByAccount SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "ACCOUNT" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByShare SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "SHARE" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByCampaign SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CAMPAIGN" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByCreative SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CREATIVE" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByCampaignGroup SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CAMPAIGN_GROUP" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByConversion SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CONVERSION" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByConversationNode SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CONVERSATION_NODE" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByConversationNodeOptionIndex SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CONVERSATION_NODE_OPTION_INDEX" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByServingLocation SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "SERVING_LOCATION" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByCardIndex SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "CARD_INDEX" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberCompanySize SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_COMPANY_SIZE" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberIndustry SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_INDUSTRY" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberSeniority SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_SENIORITY" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberJobTitle SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_JOB_TITLE " - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberJobFunction SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_JOB_FUNCTION " - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberCountryV2 SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_COUNTRY_V2 " - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberRegionV2 SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_REGION_V2" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByMemberCompany SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "MEMBER_COMPANY" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByPlacementName SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "PLACEMENT_NAME" - SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotByImpressionDeviceType SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy = "IMPRESSION_DEVICE_TYPE" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCompany SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "COMPANY" + 
SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryAccount SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "ACCOUNT" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryShare SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "SHARE" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCampaign SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCreative SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CREATIVE" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCampaignGroup SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CAMPAIGN_GROUP" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryConversion SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CONVERSION" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryConversationNode SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryConversationNodeOptionIndex SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CONVERSATION_NODE_OPTION_INDEX" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryServingLocation SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "SERVING_LOCATION" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryCardIndex SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "CARD_INDEX" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberCompanySize SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY_SIZE" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberIndustry SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_INDUSTRY" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberSeniority SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_SENIORITY" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberJobTitle SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_TITLE " + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberJobFunction SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_JOB_FUNCTION " + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberCountryV2 SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COUNTRY_V2 " + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberRegionV2 SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_REGION_V2" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryMemberCompany SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "MEMBER_COMPANY" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryPlacementName SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "PLACEMENT_NAME" + SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategoryImpressionDeviceType SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory = "IMPRESSION_DEVICE_TYPE" ) -func (e SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy) ToPointer() *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy { +func (e 
SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory) ToPointer() *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory { return &e } -func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy) UnmarshalJSON(data []byte) error { +func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err @@ -88,18 +88,14 @@ func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy) Unmarshal case "PLACEMENT_NAME": fallthrough case "IMPRESSION_DEVICE_TYPE": - *e = SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy(v) + *e = SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory(v) return nil default: - return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy: %v", v) + return fmt.Errorf("invalid value for SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory: %v", v) } } -// SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity - Set time granularity for report: -// ALL - Results grouped into a single result across the entire time range of the report. -// DAILY - Results grouped by day. -// MONTHLY - Results grouped by month. -// YEARLY - Results grouped by year. +// SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity - Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. type SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity string const ( @@ -135,15 +131,11 @@ func (e *SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity) U // SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration - Config for custom ad Analytics Report type SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration struct { - // The name for the report + // The name for the custom report. Name string `json:"name"` - // Select value from list to pivot by - PivotBy SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotBy `json:"pivot_by"` - // Set time granularity for report: - // ALL - Results grouped into a single result across the entire time range of the report. - // DAILY - Results grouped by day. - // MONTHLY - Results grouped by month. - // YEARLY - Results grouped by year. + // Choose a category to pivot your analytics report around. This selection will organize your data based on the chosen attribute, allowing you to analyze trends and performance from different perspectives. + PivotBy SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationPivotCategory `json:"pivot_by"` + // Choose how to group the data in your report by time. The options are:
- 'ALL': A single result summarizing the entire time range.
- 'DAILY': Group results by each day.
- 'MONTHLY': Group results by each month.
- 'YEARLY': Group results by each year.
Selecting a time grouping helps you analyze trends and patterns over different time periods. TimeGranularity SourceLinkedinAdsUpdateAdAnalyticsReportConfigurationTimeGranularity `json:"time_granularity"` } @@ -172,7 +164,7 @@ func (e *SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod) UnmarshalJS } type SourceLinkedinAdsUpdateAuthenticationAccessToken struct { - // The token value generated using the authentication code. See the docs to obtain yours. + // The access token generated for your developer application. Refer to our documentation for more information. AccessToken string `json:"access_token"` AuthMethod *SourceLinkedinAdsUpdateAuthenticationAccessTokenAuthMethod `json:"auth_method,omitempty"` } @@ -203,11 +195,11 @@ func (e *SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod) UnmarshalJSON(d type SourceLinkedinAdsUpdateAuthenticationOAuth20 struct { AuthMethod *SourceLinkedinAdsUpdateAuthenticationOAuth20AuthMethod `json:"auth_method,omitempty"` - // The client ID of the LinkedIn Ads developer application. + // The client ID of your developer application. Refer to our documentation for more information. ClientID string `json:"client_id"` - // The client secret the LinkedIn Ads developer application. + // The client secret of your developer application. Refer to our documentation for more information. ClientSecret string `json:"client_secret"` - // The key to refresh the expired access token. + // The key to refresh the expired access token. Refer to our documentation for more information. RefreshToken string `json:"refresh_token"` } @@ -215,7 +207,7 @@ type SourceLinkedinAdsUpdateAuthenticationType string const ( SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationOAuth20 SourceLinkedinAdsUpdateAuthenticationType = "source-linkedin-ads-update_Authentication_OAuth2.0" - SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken SourceLinkedinAdsUpdateAuthenticationType = "source-linkedin-ads-update_Authentication_Access token" + SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken SourceLinkedinAdsUpdateAuthenticationType = "source-linkedin-ads-update_Authentication_Access Token" ) type SourceLinkedinAdsUpdateAuthentication struct { @@ -246,21 +238,21 @@ func CreateSourceLinkedinAdsUpdateAuthenticationSourceLinkedinAdsUpdateAuthentic func (u *SourceLinkedinAdsUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceLinkedinAdsUpdateAuthenticationOAuth20 := new(SourceLinkedinAdsUpdateAuthenticationOAuth20) + sourceLinkedinAdsUpdateAuthenticationAccessToken := new(SourceLinkedinAdsUpdateAuthenticationAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinAdsUpdateAuthenticationOAuth20); err == nil { - u.SourceLinkedinAdsUpdateAuthenticationOAuth20 = sourceLinkedinAdsUpdateAuthenticationOAuth20 - u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationOAuth20 + if err := d.Decode(&sourceLinkedinAdsUpdateAuthenticationAccessToken); err == nil { + u.SourceLinkedinAdsUpdateAuthenticationAccessToken = sourceLinkedinAdsUpdateAuthenticationAccessToken + u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken return nil } - sourceLinkedinAdsUpdateAuthenticationAccessToken := new(SourceLinkedinAdsUpdateAuthenticationAccessToken) + sourceLinkedinAdsUpdateAuthenticationOAuth20 := 
new(SourceLinkedinAdsUpdateAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinAdsUpdateAuthenticationAccessToken); err == nil { - u.SourceLinkedinAdsUpdateAuthenticationAccessToken = sourceLinkedinAdsUpdateAuthenticationAccessToken - u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationAccessToken + if err := d.Decode(&sourceLinkedinAdsUpdateAuthenticationOAuth20); err == nil { + u.SourceLinkedinAdsUpdateAuthenticationOAuth20 = sourceLinkedinAdsUpdateAuthenticationOAuth20 + u.Type = SourceLinkedinAdsUpdateAuthenticationTypeSourceLinkedinAdsUpdateAuthenticationOAuth20 return nil } @@ -268,22 +260,22 @@ func (u *SourceLinkedinAdsUpdateAuthentication) UnmarshalJSON(data []byte) error } func (u SourceLinkedinAdsUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceLinkedinAdsUpdateAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceLinkedinAdsUpdateAuthenticationOAuth20) - } - if u.SourceLinkedinAdsUpdateAuthenticationAccessToken != nil { return json.Marshal(u.SourceLinkedinAdsUpdateAuthenticationAccessToken) } + if u.SourceLinkedinAdsUpdateAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceLinkedinAdsUpdateAuthenticationOAuth20) + } + return nil, nil } type SourceLinkedinAdsUpdate struct { - // Specify the account IDs separated by a space, to pull the data from. Leave empty, if you want to pull the data from all associated accounts. See the LinkedIn Ads docs for more info. + // Specify the account IDs to pull data from, separated by a space. Leave this field empty if you want to pull the data from all accounts accessible by the authenticated user. See the LinkedIn docs to locate these IDs. AccountIds []int64 `json:"account_ids,omitempty"` AdAnalyticsReports []SourceLinkedinAdsUpdateAdAnalyticsReportConfiguration `json:"ad_analytics_reports,omitempty"` Credentials *SourceLinkedinAdsUpdateAuthentication `json:"credentials,omitempty"` - // UTC date in the format 2020-09-17. Any data before this date will not be replicated. + // UTC date in the format YYYY-MM-DD. Any data before this date will not be replicated. 
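The start_date description above now documents the layout rather than a sample value; in Go terms the accepted strings are those that parse with the 2006-01-02 reference layout. A standalone sketch (not SDK code) of validating such a date:

package main

import (
	"fmt"
	"time"
)

func main() {
	// YYYY-MM-DD, interpreted as a UTC calendar date.
	const layout = "2006-01-02"
	d, err := time.Parse(layout, "2020-09-17")
	if err != nil {
		panic(err)
	}
	fmt.Println(d.Format(layout)) // 2020-09-17
}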
StartDate types.Date `json:"start_date"` } diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinpages.go b/internal/sdk/pkg/models/shared/sourcelinkedinpages.go index 646f05fcf..d57609355 100755 --- a/internal/sdk/pkg/models/shared/sourcelinkedinpages.go +++ b/internal/sdk/pkg/models/shared/sourcelinkedinpages.go @@ -108,21 +108,21 @@ func CreateSourceLinkedinPagesAuthenticationSourceLinkedinPagesAuthenticationAcc func (u *SourceLinkedinPagesAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceLinkedinPagesAuthenticationOAuth20 := new(SourceLinkedinPagesAuthenticationOAuth20) + sourceLinkedinPagesAuthenticationAccessToken := new(SourceLinkedinPagesAuthenticationAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinPagesAuthenticationOAuth20); err == nil { - u.SourceLinkedinPagesAuthenticationOAuth20 = sourceLinkedinPagesAuthenticationOAuth20 - u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationOAuth20 + if err := d.Decode(&sourceLinkedinPagesAuthenticationAccessToken); err == nil { + u.SourceLinkedinPagesAuthenticationAccessToken = sourceLinkedinPagesAuthenticationAccessToken + u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationAccessToken return nil } - sourceLinkedinPagesAuthenticationAccessToken := new(SourceLinkedinPagesAuthenticationAccessToken) + sourceLinkedinPagesAuthenticationOAuth20 := new(SourceLinkedinPagesAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinPagesAuthenticationAccessToken); err == nil { - u.SourceLinkedinPagesAuthenticationAccessToken = sourceLinkedinPagesAuthenticationAccessToken - u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationAccessToken + if err := d.Decode(&sourceLinkedinPagesAuthenticationOAuth20); err == nil { + u.SourceLinkedinPagesAuthenticationOAuth20 = sourceLinkedinPagesAuthenticationOAuth20 + u.Type = SourceLinkedinPagesAuthenticationTypeSourceLinkedinPagesAuthenticationOAuth20 return nil } @@ -130,14 +130,14 @@ func (u *SourceLinkedinPagesAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceLinkedinPagesAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceLinkedinPagesAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceLinkedinPagesAuthenticationOAuth20) - } - if u.SourceLinkedinPagesAuthenticationAccessToken != nil { return json.Marshal(u.SourceLinkedinPagesAuthenticationAccessToken) } + if u.SourceLinkedinPagesAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceLinkedinPagesAuthenticationOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go b/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go index bc9104a3e..d6272b51c 100755 --- a/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go +++ b/internal/sdk/pkg/models/shared/sourcelinkedinpagesupdate.go @@ -108,21 +108,21 @@ func CreateSourceLinkedinPagesUpdateAuthenticationSourceLinkedinPagesUpdateAuthe func (u *SourceLinkedinPagesUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceLinkedinPagesUpdateAuthenticationOAuth20 := new(SourceLinkedinPagesUpdateAuthenticationOAuth20) + sourceLinkedinPagesUpdateAuthenticationAccessToken := new(SourceLinkedinPagesUpdateAuthenticationAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := 
d.Decode(&sourceLinkedinPagesUpdateAuthenticationOAuth20); err == nil { - u.SourceLinkedinPagesUpdateAuthenticationOAuth20 = sourceLinkedinPagesUpdateAuthenticationOAuth20 - u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationOAuth20 + if err := d.Decode(&sourceLinkedinPagesUpdateAuthenticationAccessToken); err == nil { + u.SourceLinkedinPagesUpdateAuthenticationAccessToken = sourceLinkedinPagesUpdateAuthenticationAccessToken + u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationAccessToken return nil } - sourceLinkedinPagesUpdateAuthenticationAccessToken := new(SourceLinkedinPagesUpdateAuthenticationAccessToken) + sourceLinkedinPagesUpdateAuthenticationOAuth20 := new(SourceLinkedinPagesUpdateAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceLinkedinPagesUpdateAuthenticationAccessToken); err == nil { - u.SourceLinkedinPagesUpdateAuthenticationAccessToken = sourceLinkedinPagesUpdateAuthenticationAccessToken - u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationAccessToken + if err := d.Decode(&sourceLinkedinPagesUpdateAuthenticationOAuth20); err == nil { + u.SourceLinkedinPagesUpdateAuthenticationOAuth20 = sourceLinkedinPagesUpdateAuthenticationOAuth20 + u.Type = SourceLinkedinPagesUpdateAuthenticationTypeSourceLinkedinPagesUpdateAuthenticationOAuth20 return nil } @@ -130,14 +130,14 @@ func (u *SourceLinkedinPagesUpdateAuthentication) UnmarshalJSON(data []byte) err } func (u SourceLinkedinPagesUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceLinkedinPagesUpdateAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceLinkedinPagesUpdateAuthenticationOAuth20) - } - if u.SourceLinkedinPagesUpdateAuthenticationAccessToken != nil { return json.Marshal(u.SourceLinkedinPagesUpdateAuthenticationAccessToken) } + if u.SourceLinkedinPagesUpdateAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceLinkedinPagesUpdateAuthenticationOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemailchimp.go b/internal/sdk/pkg/models/shared/sourcemailchimp.go index eedb94a7c..1bf0ec2a8 100755 --- a/internal/sdk/pkg/models/shared/sourcemailchimp.go +++ b/internal/sdk/pkg/models/shared/sourcemailchimp.go @@ -108,21 +108,21 @@ func CreateSourceMailchimpAuthenticationSourceMailchimpAuthenticationAPIKey(sour func (u *SourceMailchimpAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMailchimpAuthenticationOAuth20 := new(SourceMailchimpAuthenticationOAuth20) + sourceMailchimpAuthenticationAPIKey := new(SourceMailchimpAuthenticationAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMailchimpAuthenticationOAuth20); err == nil { - u.SourceMailchimpAuthenticationOAuth20 = sourceMailchimpAuthenticationOAuth20 - u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationOAuth20 + if err := d.Decode(&sourceMailchimpAuthenticationAPIKey); err == nil { + u.SourceMailchimpAuthenticationAPIKey = sourceMailchimpAuthenticationAPIKey + u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationAPIKey return nil } - sourceMailchimpAuthenticationAPIKey := new(SourceMailchimpAuthenticationAPIKey) + sourceMailchimpAuthenticationOAuth20 := new(SourceMailchimpAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := 
d.Decode(&sourceMailchimpAuthenticationAPIKey); err == nil { - u.SourceMailchimpAuthenticationAPIKey = sourceMailchimpAuthenticationAPIKey - u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationAPIKey + if err := d.Decode(&sourceMailchimpAuthenticationOAuth20); err == nil { + u.SourceMailchimpAuthenticationOAuth20 = sourceMailchimpAuthenticationOAuth20 + u.Type = SourceMailchimpAuthenticationTypeSourceMailchimpAuthenticationOAuth20 return nil } @@ -130,14 +130,14 @@ func (u *SourceMailchimpAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceMailchimpAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceMailchimpAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceMailchimpAuthenticationOAuth20) - } - if u.SourceMailchimpAuthenticationAPIKey != nil { return json.Marshal(u.SourceMailchimpAuthenticationAPIKey) } + if u.SourceMailchimpAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceMailchimpAuthenticationOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go b/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go index fe983a38a..beaab7dda 100755 --- a/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemailchimpupdate.go @@ -108,21 +108,21 @@ func CreateSourceMailchimpUpdateAuthenticationSourceMailchimpUpdateAuthenticatio func (u *SourceMailchimpUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMailchimpUpdateAuthenticationOAuth20 := new(SourceMailchimpUpdateAuthenticationOAuth20) + sourceMailchimpUpdateAuthenticationAPIKey := new(SourceMailchimpUpdateAuthenticationAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMailchimpUpdateAuthenticationOAuth20); err == nil { - u.SourceMailchimpUpdateAuthenticationOAuth20 = sourceMailchimpUpdateAuthenticationOAuth20 - u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationOAuth20 + if err := d.Decode(&sourceMailchimpUpdateAuthenticationAPIKey); err == nil { + u.SourceMailchimpUpdateAuthenticationAPIKey = sourceMailchimpUpdateAuthenticationAPIKey + u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationAPIKey return nil } - sourceMailchimpUpdateAuthenticationAPIKey := new(SourceMailchimpUpdateAuthenticationAPIKey) + sourceMailchimpUpdateAuthenticationOAuth20 := new(SourceMailchimpUpdateAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMailchimpUpdateAuthenticationAPIKey); err == nil { - u.SourceMailchimpUpdateAuthenticationAPIKey = sourceMailchimpUpdateAuthenticationAPIKey - u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationAPIKey + if err := d.Decode(&sourceMailchimpUpdateAuthenticationOAuth20); err == nil { + u.SourceMailchimpUpdateAuthenticationOAuth20 = sourceMailchimpUpdateAuthenticationOAuth20 + u.Type = SourceMailchimpUpdateAuthenticationTypeSourceMailchimpUpdateAuthenticationOAuth20 return nil } @@ -130,14 +130,14 @@ func (u *SourceMailchimpUpdateAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceMailchimpUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceMailchimpUpdateAuthenticationOAuth20 != nil { - return json.Marshal(u.SourceMailchimpUpdateAuthenticationOAuth20) - } - if u.SourceMailchimpUpdateAuthenticationAPIKey != nil { return json.Marshal(u.SourceMailchimpUpdateAuthenticationAPIKey) } + if 
u.SourceMailchimpUpdateAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceMailchimpUpdateAuthenticationOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go b/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go index 00992f760..86ce9798c 100755 --- a/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go +++ b/internal/sdk/pkg/models/shared/sourcemicrosoftteams.go @@ -116,21 +116,21 @@ func CreateSourceMicrosoftTeamsAuthenticationMechanismSourceMicrosoftTeamsAuthen func (u *SourceMicrosoftTeamsAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) + sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20); err == nil { - u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 - u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 + if err := d.Decode(&sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft); err == nil { + u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft = sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft + u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft return nil } - sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft) + sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft); err == nil { - u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft = sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft - u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft + if err := d.Decode(&sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20); err == nil { + u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 + u.Type = SourceMicrosoftTeamsAuthenticationMechanismTypeSourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 return nil } @@ -138,14 +138,14 @@ func (u *SourceMicrosoftTeamsAuthenticationMechanism) UnmarshalJSON(data []byte) } func (u SourceMicrosoftTeamsAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil { - return json.Marshal(u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) - } - if u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft != nil { return json.Marshal(u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoft) } + if 
u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil { + return json.Marshal(u.SourceMicrosoftTeamsAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go b/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go index 3eec09c73..510b700cd 100755 --- a/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemicrosoftteamsupdate.go @@ -116,21 +116,21 @@ func CreateSourceMicrosoftTeamsUpdateAuthenticationMechanismSourceMicrosoftTeams func (u *SourceMicrosoftTeamsUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) + sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20); err == nil { - u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 - u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 + if err := d.Decode(&sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft); err == nil { + u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft = sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft + u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft return nil } - sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft := new(SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft) + sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 := new(SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft); err == nil { - u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft = sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft - u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft + if err := d.Decode(&sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20); err == nil { + u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 = sourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 + u.Type = SourceMicrosoftTeamsUpdateAuthenticationMechanismTypeSourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 return nil } @@ -138,14 +138,14 @@ func (u *SourceMicrosoftTeamsUpdateAuthenticationMechanism) UnmarshalJSON(data [ } func (u SourceMicrosoftTeamsUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil { - return 
json.Marshal(u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) - } - if u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft != nil { return json.Marshal(u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoft) } + if u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20 != nil { + return json.Marshal(u.SourceMicrosoftTeamsUpdateAuthenticationMechanismAuthenticateViaMicrosoftOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemixpanel.go b/internal/sdk/pkg/models/shared/sourcemixpanel.go index b6aa1db03..6b993de7a 100755 --- a/internal/sdk/pkg/models/shared/sourcemixpanel.go +++ b/internal/sdk/pkg/models/shared/sourcemixpanel.go @@ -109,21 +109,21 @@ func CreateSourceMixpanelAuthenticationWildcardSourceMixpanelAuthenticationWildc func (u *SourceMixpanelAuthenticationWildcard) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMixpanelAuthenticationWildcardServiceAccount := new(SourceMixpanelAuthenticationWildcardServiceAccount) + sourceMixpanelAuthenticationWildcardProjectSecret := new(SourceMixpanelAuthenticationWildcardProjectSecret) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMixpanelAuthenticationWildcardServiceAccount); err == nil { - u.SourceMixpanelAuthenticationWildcardServiceAccount = sourceMixpanelAuthenticationWildcardServiceAccount - u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardServiceAccount + if err := d.Decode(&sourceMixpanelAuthenticationWildcardProjectSecret); err == nil { + u.SourceMixpanelAuthenticationWildcardProjectSecret = sourceMixpanelAuthenticationWildcardProjectSecret + u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardProjectSecret return nil } - sourceMixpanelAuthenticationWildcardProjectSecret := new(SourceMixpanelAuthenticationWildcardProjectSecret) + sourceMixpanelAuthenticationWildcardServiceAccount := new(SourceMixpanelAuthenticationWildcardServiceAccount) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMixpanelAuthenticationWildcardProjectSecret); err == nil { - u.SourceMixpanelAuthenticationWildcardProjectSecret = sourceMixpanelAuthenticationWildcardProjectSecret - u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardProjectSecret + if err := d.Decode(&sourceMixpanelAuthenticationWildcardServiceAccount); err == nil { + u.SourceMixpanelAuthenticationWildcardServiceAccount = sourceMixpanelAuthenticationWildcardServiceAccount + u.Type = SourceMixpanelAuthenticationWildcardTypeSourceMixpanelAuthenticationWildcardServiceAccount return nil } @@ -131,14 +131,14 @@ func (u *SourceMixpanelAuthenticationWildcard) UnmarshalJSON(data []byte) error } func (u SourceMixpanelAuthenticationWildcard) MarshalJSON() ([]byte, error) { - if u.SourceMixpanelAuthenticationWildcardServiceAccount != nil { - return json.Marshal(u.SourceMixpanelAuthenticationWildcardServiceAccount) - } - if u.SourceMixpanelAuthenticationWildcardProjectSecret != nil { return json.Marshal(u.SourceMixpanelAuthenticationWildcardProjectSecret) } + if u.SourceMixpanelAuthenticationWildcardServiceAccount != nil { + return json.Marshal(u.SourceMixpanelAuthenticationWildcardServiceAccount) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go b/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go index 
03ba7fbcf..7d1dc36bd 100755 --- a/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemixpanelupdate.go @@ -109,21 +109,21 @@ func CreateSourceMixpanelUpdateAuthenticationWildcardSourceMixpanelUpdateAuthent func (u *SourceMixpanelUpdateAuthenticationWildcard) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMixpanelUpdateAuthenticationWildcardServiceAccount := new(SourceMixpanelUpdateAuthenticationWildcardServiceAccount) + sourceMixpanelUpdateAuthenticationWildcardProjectSecret := new(SourceMixpanelUpdateAuthenticationWildcardProjectSecret) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMixpanelUpdateAuthenticationWildcardServiceAccount); err == nil { - u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount = sourceMixpanelUpdateAuthenticationWildcardServiceAccount - u.Type = SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardServiceAccount + if err := d.Decode(&sourceMixpanelUpdateAuthenticationWildcardProjectSecret); err == nil { + u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret = sourceMixpanelUpdateAuthenticationWildcardProjectSecret + u.Type = SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardProjectSecret return nil } - sourceMixpanelUpdateAuthenticationWildcardProjectSecret := new(SourceMixpanelUpdateAuthenticationWildcardProjectSecret) + sourceMixpanelUpdateAuthenticationWildcardServiceAccount := new(SourceMixpanelUpdateAuthenticationWildcardServiceAccount) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMixpanelUpdateAuthenticationWildcardProjectSecret); err == nil { - u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret = sourceMixpanelUpdateAuthenticationWildcardProjectSecret - u.Type = SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardProjectSecret + if err := d.Decode(&sourceMixpanelUpdateAuthenticationWildcardServiceAccount); err == nil { + u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount = sourceMixpanelUpdateAuthenticationWildcardServiceAccount + u.Type = SourceMixpanelUpdateAuthenticationWildcardTypeSourceMixpanelUpdateAuthenticationWildcardServiceAccount return nil } @@ -131,14 +131,14 @@ func (u *SourceMixpanelUpdateAuthenticationWildcard) UnmarshalJSON(data []byte) } func (u SourceMixpanelUpdateAuthenticationWildcard) MarshalJSON() ([]byte, error) { - if u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount != nil { - return json.Marshal(u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount) - } - if u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret != nil { return json.Marshal(u.SourceMixpanelUpdateAuthenticationWildcardProjectSecret) } + if u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount != nil { + return json.Marshal(u.SourceMixpanelUpdateAuthenticationWildcardServiceAccount) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemonday.go b/internal/sdk/pkg/models/shared/sourcemonday.go index cddb6adf3..aea4a1b89 100755 --- a/internal/sdk/pkg/models/shared/sourcemonday.go +++ b/internal/sdk/pkg/models/shared/sourcemonday.go @@ -110,21 +110,21 @@ func CreateSourceMondayAuthorizationMethodSourceMondayAuthorizationMethodAPIToke func (u *SourceMondayAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMondayAuthorizationMethodOAuth20 := new(SourceMondayAuthorizationMethodOAuth20) + 
sourceMondayAuthorizationMethodAPIToken := new(SourceMondayAuthorizationMethodAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMondayAuthorizationMethodOAuth20); err == nil { - u.SourceMondayAuthorizationMethodOAuth20 = sourceMondayAuthorizationMethodOAuth20 - u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodOAuth20 + if err := d.Decode(&sourceMondayAuthorizationMethodAPIToken); err == nil { + u.SourceMondayAuthorizationMethodAPIToken = sourceMondayAuthorizationMethodAPIToken + u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodAPIToken return nil } - sourceMondayAuthorizationMethodAPIToken := new(SourceMondayAuthorizationMethodAPIToken) + sourceMondayAuthorizationMethodOAuth20 := new(SourceMondayAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMondayAuthorizationMethodAPIToken); err == nil { - u.SourceMondayAuthorizationMethodAPIToken = sourceMondayAuthorizationMethodAPIToken - u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodAPIToken + if err := d.Decode(&sourceMondayAuthorizationMethodOAuth20); err == nil { + u.SourceMondayAuthorizationMethodOAuth20 = sourceMondayAuthorizationMethodOAuth20 + u.Type = SourceMondayAuthorizationMethodTypeSourceMondayAuthorizationMethodOAuth20 return nil } @@ -132,14 +132,14 @@ func (u *SourceMondayAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceMondayAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceMondayAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceMondayAuthorizationMethodOAuth20) - } - if u.SourceMondayAuthorizationMethodAPIToken != nil { return json.Marshal(u.SourceMondayAuthorizationMethodAPIToken) } + if u.SourceMondayAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceMondayAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemondayupdate.go b/internal/sdk/pkg/models/shared/sourcemondayupdate.go index 1a6749c31..5f2914db2 100755 --- a/internal/sdk/pkg/models/shared/sourcemondayupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemondayupdate.go @@ -110,21 +110,21 @@ func CreateSourceMondayUpdateAuthorizationMethodSourceMondayUpdateAuthorizationM func (u *SourceMondayUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMondayUpdateAuthorizationMethodOAuth20 := new(SourceMondayUpdateAuthorizationMethodOAuth20) + sourceMondayUpdateAuthorizationMethodAPIToken := new(SourceMondayUpdateAuthorizationMethodAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMondayUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceMondayUpdateAuthorizationMethodOAuth20 = sourceMondayUpdateAuthorizationMethodOAuth20 - u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceMondayUpdateAuthorizationMethodAPIToken); err == nil { + u.SourceMondayUpdateAuthorizationMethodAPIToken = sourceMondayUpdateAuthorizationMethodAPIToken + u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodAPIToken return nil } - sourceMondayUpdateAuthorizationMethodAPIToken := new(SourceMondayUpdateAuthorizationMethodAPIToken) + sourceMondayUpdateAuthorizationMethodOAuth20 := new(SourceMondayUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) 
d.DisallowUnknownFields() - if err := d.Decode(&sourceMondayUpdateAuthorizationMethodAPIToken); err == nil { - u.SourceMondayUpdateAuthorizationMethodAPIToken = sourceMondayUpdateAuthorizationMethodAPIToken - u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodAPIToken + if err := d.Decode(&sourceMondayUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceMondayUpdateAuthorizationMethodOAuth20 = sourceMondayUpdateAuthorizationMethodOAuth20 + u.Type = SourceMondayUpdateAuthorizationMethodTypeSourceMondayUpdateAuthorizationMethodOAuth20 return nil } @@ -132,14 +132,14 @@ func (u *SourceMondayUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error } func (u SourceMondayUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceMondayUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceMondayUpdateAuthorizationMethodOAuth20) - } - if u.SourceMondayUpdateAuthorizationMethodAPIToken != nil { return json.Marshal(u.SourceMondayUpdateAuthorizationMethodAPIToken) } + if u.SourceMondayUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceMondayUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemongodb.go b/internal/sdk/pkg/models/shared/sourcemongodb.go index c5ff9830c..f24c34207 100755 --- a/internal/sdk/pkg/models/shared/sourcemongodb.go +++ b/internal/sdk/pkg/models/shared/sourcemongodb.go @@ -199,6 +199,15 @@ func CreateSourceMongodbMongoDbInstanceTypeSourceMongodbMongoDBInstanceTypeMongo func (u *SourceMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error { var d *json.Decoder + sourceMongodbMongoDBInstanceTypeMongoDBAtlas := new(SourceMongodbMongoDBInstanceTypeMongoDBAtlas) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceMongodbMongoDBInstanceTypeMongoDBAtlas); err == nil { + u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas = sourceMongodbMongoDBInstanceTypeMongoDBAtlas + u.Type = SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDBInstanceTypeMongoDBAtlas + return nil + } + sourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance := new(SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() @@ -217,19 +226,14 @@ func (u *SourceMongodbMongoDbInstanceType) UnmarshalJSON(data []byte) error { return nil } - sourceMongodbMongoDBInstanceTypeMongoDBAtlas := new(SourceMongodbMongoDBInstanceTypeMongoDBAtlas) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&sourceMongodbMongoDBInstanceTypeMongoDBAtlas); err == nil { - u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas = sourceMongodbMongoDBInstanceTypeMongoDBAtlas - u.Type = SourceMongodbMongoDbInstanceTypeTypeSourceMongodbMongoDBInstanceTypeMongoDBAtlas - return nil - } - return errors.New("could not unmarshal into supported union types") } func (u SourceMongodbMongoDbInstanceType) MarshalJSON() ([]byte, error) { + if u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas != nil { + return json.Marshal(u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas) + } + if u.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance != nil { return json.Marshal(u.SourceMongodbMongoDbInstanceTypeStandaloneMongoDbInstance) } @@ -238,10 +242,6 @@ func (u SourceMongodbMongoDbInstanceType) MarshalJSON() ([]byte, error) { return json.Marshal(u.SourceMongodbMongoDbInstanceTypeReplicaSet) } - if u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas != nil { - return 
json.Marshal(u.SourceMongodbMongoDBInstanceTypeMongoDBAtlas) - } - return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemongodbupdate.go b/internal/sdk/pkg/models/shared/sourcemongodbupdate.go index 9b3f30cbb..cff26e942 100755 --- a/internal/sdk/pkg/models/shared/sourcemongodbupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemongodbupdate.go @@ -199,6 +199,15 @@ func CreateSourceMongodbUpdateMongoDbInstanceTypeSourceMongodbUpdateMongoDBInsta func (u *SourceMongodbUpdateMongoDbInstanceType) UnmarshalJSON(data []byte) error { var d *json.Decoder + sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas := new(SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas); err == nil { + u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas + u.Type = SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas + return nil + } + sourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance := new(SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() @@ -217,19 +226,14 @@ func (u *SourceMongodbUpdateMongoDbInstanceType) UnmarshalJSON(data []byte) erro return nil } - sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas := new(SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) - d = json.NewDecoder(bytes.NewReader(data)) - d.DisallowUnknownFields() - if err := d.Decode(&sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas); err == nil { - u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas = sourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas - u.Type = SourceMongodbUpdateMongoDbInstanceTypeTypeSourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas - return nil - } - return errors.New("could not unmarshal into supported union types") } func (u SourceMongodbUpdateMongoDbInstanceType) MarshalJSON() ([]byte, error) { + if u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil { + return json.Marshal(u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) + } + if u.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance != nil { return json.Marshal(u.SourceMongodbUpdateMongoDbInstanceTypeStandaloneMongoDbInstance) } @@ -238,10 +242,6 @@ func (u SourceMongodbUpdateMongoDbInstanceType) MarshalJSON() ([]byte, error) { return json.Marshal(u.SourceMongodbUpdateMongoDbInstanceTypeReplicaSet) } - if u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas != nil { - return json.Marshal(u.SourceMongodbUpdateMongoDBInstanceTypeMongoDBAtlas) - } - return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemssql.go b/internal/sdk/pkg/models/shared/sourcemssql.go index f49dbc06f..0d6f28e3c 100755 --- a/internal/sdk/pkg/models/shared/sourcemssql.go +++ b/internal/sdk/pkg/models/shared/sourcemssql.go @@ -9,189 +9,189 @@ import ( "fmt" ) -// SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. 
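The renamed sourcemssql.go types below keep the SDK's usual enum shape: a defined string type whose UnmarshalJSON accepts only the literal values from the spec and rejects everything else. A minimal standalone sketch of that shape, using a hypothetical DataToSync type rather than a generated one:

package main

import (
	"encoding/json"
	"fmt"
)

// DataToSync is a hypothetical stand-in for the generated enum types.
type DataToSync string

const (
	DataToSyncExistingAndNew DataToSync = "Existing and New"
	DataToSyncNewChangesOnly DataToSync = "New Changes Only"
)

// UnmarshalJSON accepts only the spec-defined literals.
func (e *DataToSync) UnmarshalJSON(data []byte) error {
	var v string
	if err := json.Unmarshal(data, &v); err != nil {
		return err
	}
	switch v {
	case "Existing and New", "New Changes Only":
		*e = DataToSync(v)
		return nil
	default:
		return fmt.Errorf("invalid value for DataToSync: %v", v)
	}
}

func main() {
	var d DataToSync
	fmt.Println(json.Unmarshal([]byte(`"Existing and New"`), &d), d) // <nil> Existing and New
	fmt.Println(json.Unmarshal([]byte(`"Everything"`), &d))          // invalid value for DataToSync: Everything
}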
-type SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync string +type SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod string const ( - SourceMssqlReplicationMethodLogicalReplicationCDCDataToSyncExistingAndNew SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync = "Existing and New" - SourceMssqlReplicationMethodLogicalReplicationCDCDataToSyncNewChangesOnly SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync = "New Changes Only" + SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod = "STANDARD" ) -func (e SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync) ToPointer() *SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync { +func (e SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod { return &e } -func (e *SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "Existing and New": - fallthrough - case "New Changes Only": - *e = SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync(v) + case "STANDARD": + *e = SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v) } } -type SourceMssqlReplicationMethodLogicalReplicationCDCMethod string +// SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). +type SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor struct { + Method SourceMssqlUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"` +} + +// SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. 
+type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync string const ( - SourceMssqlReplicationMethodLogicalReplicationCDCMethodCdc SourceMssqlReplicationMethodLogicalReplicationCDCMethod = "CDC" + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncExistingAndNew SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "Existing and New" + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncNewChangesOnly SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "New Changes Only" ) -func (e SourceMssqlReplicationMethodLogicalReplicationCDCMethod) ToPointer() *SourceMssqlReplicationMethodLogicalReplicationCDCMethod { +func (e SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) ToPointer() *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync { return &e } -func (e *SourceMssqlReplicationMethodLogicalReplicationCDCMethod) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "CDC": - *e = SourceMssqlReplicationMethodLogicalReplicationCDCMethod(v) + case "Existing and New": + fallthrough + case "New Changes Only": + *e = SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlReplicationMethodLogicalReplicationCDCMethod: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync: %v", v) } } -// SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
-type SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel string +type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod string const ( - SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelSnapshot SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel = "Snapshot" - SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelReadCommitted SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel = "Read Committed" + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethodCdc SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod = "CDC" ) -func (e SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel { +func (e SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) ToPointer() *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod { return &e } -func (e *SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "Snapshot": - fallthrough - case "Read Committed": - *e = SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel(v) + case "CDC": + *e = SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod: %v", v) } } -// SourceMssqlReplicationMethodLogicalReplicationCDC - CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. -type SourceMssqlReplicationMethodLogicalReplicationCDC struct { - // What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. - DataToSync *SourceMssqlReplicationMethodLogicalReplicationCDCDataToSync `json:"data_to_sync,omitempty"` - // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. - InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"` - Method SourceMssqlReplicationMethodLogicalReplicationCDCMethod `json:"method"` - // Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. - SnapshotIsolation *SourceMssqlReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel `json:"snapshot_isolation,omitempty"` -} - -type SourceMssqlReplicationMethodStandardMethod string +// SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. 
If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. +type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel string const ( - SourceMssqlReplicationMethodStandardMethodStandard SourceMssqlReplicationMethodStandardMethod = "STANDARD" + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelSnapshot SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Snapshot" + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelReadCommitted SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Read Committed" ) -func (e SourceMssqlReplicationMethodStandardMethod) ToPointer() *SourceMssqlReplicationMethodStandardMethod { +func (e SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel { return &e } -func (e *SourceMssqlReplicationMethodStandardMethod) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "STANDARD": - *e = SourceMssqlReplicationMethodStandardMethod(v) + case "Snapshot": + fallthrough + case "Read Committed": + *e = SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlReplicationMethodStandardMethod: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel: %v", v) } } -// SourceMssqlReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. -type SourceMssqlReplicationMethodStandard struct { - Method SourceMssqlReplicationMethodStandardMethod `json:"method"` +// SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. +type SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC struct { + // What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. + DataToSync *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync `json:"data_to_sync,omitempty"` + // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. + InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"` + Method SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod `json:"method"` + // Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
+ SnapshotIsolation *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel `json:"snapshot_isolation,omitempty"` } -type SourceMssqlReplicationMethodType string +type SourceMssqlUpdateMethodType string const ( - SourceMssqlReplicationMethodTypeSourceMssqlReplicationMethodStandard SourceMssqlReplicationMethodType = "source-mssql_Replication Method_Standard" - SourceMssqlReplicationMethodTypeSourceMssqlReplicationMethodLogicalReplicationCDC SourceMssqlReplicationMethodType = "source-mssql_Replication Method_Logical Replication (CDC)" + SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateMethodType = "source-mssql_Update Method_Read Changes using Change Data Capture (CDC)" + SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateMethodType = "source-mssql_Update Method_Scan Changes with User Defined Cursor" ) -type SourceMssqlReplicationMethod struct { - SourceMssqlReplicationMethodStandard *SourceMssqlReplicationMethodStandard - SourceMssqlReplicationMethodLogicalReplicationCDC *SourceMssqlReplicationMethodLogicalReplicationCDC +type SourceMssqlUpdateMethod struct { + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC + SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor - Type SourceMssqlReplicationMethodType + Type SourceMssqlUpdateMethodType } -func CreateSourceMssqlReplicationMethodSourceMssqlReplicationMethodStandard(sourceMssqlReplicationMethodStandard SourceMssqlReplicationMethodStandard) SourceMssqlReplicationMethod { - typ := SourceMssqlReplicationMethodTypeSourceMssqlReplicationMethodStandard +func CreateSourceMssqlUpdateMethodSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC(sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC) SourceMssqlUpdateMethod { + typ := SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC - return SourceMssqlReplicationMethod{ - SourceMssqlReplicationMethodStandard: &sourceMssqlReplicationMethodStandard, - Type: typ, + return SourceMssqlUpdateMethod{ + SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC: &sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC, + Type: typ, } } -func CreateSourceMssqlReplicationMethodSourceMssqlReplicationMethodLogicalReplicationCDC(sourceMssqlReplicationMethodLogicalReplicationCDC SourceMssqlReplicationMethodLogicalReplicationCDC) SourceMssqlReplicationMethod { - typ := SourceMssqlReplicationMethodTypeSourceMssqlReplicationMethodLogicalReplicationCDC +func CreateSourceMssqlUpdateMethodSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor(sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor) SourceMssqlUpdateMethod { + typ := SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor - return SourceMssqlReplicationMethod{ - SourceMssqlReplicationMethodLogicalReplicationCDC: &sourceMssqlReplicationMethodLogicalReplicationCDC, + return SourceMssqlUpdateMethod{ + SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor: &sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor, Type: typ, } } -func (u *SourceMssqlReplicationMethod) UnmarshalJSON(data []byte) error { +func (u *SourceMssqlUpdateMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - 
sourceMssqlReplicationMethodStandard := new(SourceMssqlReplicationMethodStandard) + sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMssqlReplicationMethodStandard); err == nil { - u.SourceMssqlReplicationMethodStandard = sourceMssqlReplicationMethodStandard - u.Type = SourceMssqlReplicationMethodTypeSourceMssqlReplicationMethodStandard + if err := d.Decode(&sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor); err == nil { + u.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor = sourceMssqlUpdateMethodScanChangesWithUserDefinedCursor + u.Type = SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodScanChangesWithUserDefinedCursor return nil } - sourceMssqlReplicationMethodLogicalReplicationCDC := new(SourceMssqlReplicationMethodLogicalReplicationCDC) + sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC := new(SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMssqlReplicationMethodLogicalReplicationCDC); err == nil { - u.SourceMssqlReplicationMethodLogicalReplicationCDC = sourceMssqlReplicationMethodLogicalReplicationCDC - u.Type = SourceMssqlReplicationMethodTypeSourceMssqlReplicationMethodLogicalReplicationCDC + if err := d.Decode(&sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC); err == nil { + u.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC = sourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC + u.Type = SourceMssqlUpdateMethodTypeSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC return nil } return errors.New("could not unmarshal into supported union types") } -func (u SourceMssqlReplicationMethod) MarshalJSON() ([]byte, error) { - if u.SourceMssqlReplicationMethodStandard != nil { - return json.Marshal(u.SourceMssqlReplicationMethodStandard) +func (u SourceMssqlUpdateMethod) MarshalJSON() ([]byte, error) { + if u.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor != nil { + return json.Marshal(u.SourceMssqlUpdateMethodScanChangesWithUserDefinedCursor) } - if u.SourceMssqlReplicationMethodLogicalReplicationCDC != nil { - return json.Marshal(u.SourceMssqlReplicationMethodLogicalReplicationCDC) + if u.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil { + return json.Marshal(u.SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC) } return nil, nil @@ -561,8 +561,8 @@ type SourceMssql struct { Password *string `json:"password,omitempty"` // The port of the database. Port int64 `json:"port"` - // The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. - ReplicationMethod *SourceMssqlReplicationMethod `json:"replication_method,omitempty"` + // Configures how data is extracted from the database. + ReplicationMethod *SourceMssqlUpdateMethod `json:"replication_method,omitempty"` // The list of schemas to sync from. Defaults to user. Case sensitive. 
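For orientation, a small sketch of how the renamed union might be populated and round-tripped after this change. It uses only identifiers introduced in this hunk; the wrapper function and the 300-second value are illustrative, and the snippet is again written as if it lived alongside the generated code in package shared.

package shared

import (
	"encoding/json"
	"fmt"
)

func sketchMssqlUpdateMethod() {
	initialWait := int64(300)

	// Pick the CDC variant via the generated constructor.
	m := CreateSourceMssqlUpdateMethodSourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC(
		SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDC{
			DataToSync:            SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncExistingAndNew.ToPointer(),
			InitialWaitingSeconds: &initialWait,
			Method:                SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCMethodCdc,
			SnapshotIsolation:     SourceMssqlUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelSnapshot.ToPointer(),
		},
	)

	// MarshalJSON emits only the populated variant, e.g.
	// {"data_to_sync":"Existing and New",...,"method":"CDC",...}.
	b, _ := json.Marshal(m)

	// UnmarshalJSON tries the single-field cursor variant first; because the
	// decoder uses DisallowUnknownFields, that attempt fails for a CDC payload
	// and decoding falls through to the CDC variant.
	var out SourceMssqlUpdateMethod
	_ = json.Unmarshal(b, &out)
	fmt.Println(out.Type) // source-mssql_Update Method_Read Changes using Change Data Capture (CDC)
}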
Schemas []string `json:"schemas,omitempty"` SourceType SourceMssqlMssql `json:"sourceType"` diff --git a/internal/sdk/pkg/models/shared/sourcemssqlupdate.go b/internal/sdk/pkg/models/shared/sourcemssqlupdate.go index a56fef803..f0d7ddf6f 100755 --- a/internal/sdk/pkg/models/shared/sourcemssqlupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemssqlupdate.go @@ -9,189 +9,189 @@ import ( "fmt" ) -// SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. -type SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync string +type SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod string const ( - SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSyncExistingAndNew SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync = "Existing and New" - SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSyncNewChangesOnly SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync = "New Changes Only" + SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod = "STANDARD" ) -func (e SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync) ToPointer() *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync { +func (e SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod { return &e } -func (e *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "Existing and New": - fallthrough - case "New Changes Only": - *e = SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync(v) + case "STANDARD": + *e = SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v) } } -type SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod string +// SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). +type SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor struct { + Method SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"` +} + +// SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync - What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. 
+type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync string const ( - SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethodCdc SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod = "CDC" + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncExistingAndNew SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "Existing and New" + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSyncNewChangesOnly SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync = "New Changes Only" ) -func (e SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod) ToPointer() *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod { +func (e SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) ToPointer() *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync { return &e } -func (e *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "CDC": - *e = SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod(v) + case "Existing and New": + fallthrough + case "New Changes Only": + *e = SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync: %v", v) } } -// SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
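The -update model mirrors the same shape under the SourceMssqlUpdateUpdateMethod names. A sketch of the simpler "Scan Changes with User Defined Cursor" variant, using the constructor that appears later in this file's hunk; as before, it assumes placement in package shared, and the function name is illustrative.

package shared

import (
	"encoding/json"
	"fmt"
)

func sketchMssqlUpdateUpdateMethodCursor() {
	// This variant carries only the required discriminator; the cursor column
	// itself is chosen per stream when the connection is configured.
	m := CreateSourceMssqlUpdateUpdateMethodSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor(
		SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor{
			Method: SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursorMethodStandard,
		},
	)

	b, _ := json.Marshal(m)
	fmt.Println(string(b)) // {"method":"STANDARD"}
}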
-type SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel string +type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod string const ( - SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelSnapshot SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel = "Snapshot" - SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevelReadCommitted SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel = "Read Committed" + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethodCdc SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod = "CDC" ) -func (e SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel { +func (e SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) ToPointer() *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod { return &e } -func (e *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "Snapshot": - fallthrough - case "Read Committed": - *e = SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel(v) + case "CDC": + *e = SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod: %v", v) } } -// SourceMssqlUpdateReplicationMethodLogicalReplicationCDC - CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. -type SourceMssqlUpdateReplicationMethodLogicalReplicationCDC struct { - // What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. - DataToSync *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCDataToSync `json:"data_to_sync,omitempty"` - // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. - InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"` - Method SourceMssqlUpdateReplicationMethodLogicalReplicationCDCMethod `json:"method"` - // Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. 
- SnapshotIsolation *SourceMssqlUpdateReplicationMethodLogicalReplicationCDCInitialSnapshotIsolationLevel `json:"snapshot_isolation,omitempty"` -} - -type SourceMssqlUpdateReplicationMethodStandardMethod string +// SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel - Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. +type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel string const ( - SourceMssqlUpdateReplicationMethodStandardMethodStandard SourceMssqlUpdateReplicationMethodStandardMethod = "STANDARD" + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelSnapshot SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Snapshot" + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevelReadCommitted SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel = "Read Committed" ) -func (e SourceMssqlUpdateReplicationMethodStandardMethod) ToPointer() *SourceMssqlUpdateReplicationMethodStandardMethod { +func (e SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) ToPointer() *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel { return &e } -func (e *SourceMssqlUpdateReplicationMethodStandardMethod) UnmarshalJSON(data []byte) error { +func (e *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "STANDARD": - *e = SourceMssqlUpdateReplicationMethodStandardMethod(v) + case "Snapshot": + fallthrough + case "Read Committed": + *e = SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel(v) return nil default: - return fmt.Errorf("invalid value for SourceMssqlUpdateReplicationMethodStandardMethod: %v", v) + return fmt.Errorf("invalid value for SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel: %v", v) } } -// SourceMssqlUpdateReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. -type SourceMssqlUpdateReplicationMethodStandard struct { - Method SourceMssqlUpdateReplicationMethodStandardMethod `json:"method"` +// SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the SQL Server's change data capture feature. This must be enabled on your database. +type SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC struct { + // What data should be synced under the CDC. "Existing and New" will read existing data as a snapshot, and sync new changes through CDC. "New Changes Only" will skip the initial snapshot, and only sync new changes through CDC. + DataToSync *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCDataToSync `json:"data_to_sync,omitempty"` + // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. 
Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. + InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"` + Method SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCMethod `json:"method"` + // Existing data in the database are synced through an initial snapshot. This parameter controls the isolation level that will be used during the initial snapshotting. If you choose the "Snapshot" level, you must enable the snapshot isolation mode on the database. + SnapshotIsolation *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDCInitialSnapshotIsolationLevel `json:"snapshot_isolation,omitempty"` } -type SourceMssqlUpdateReplicationMethodType string +type SourceMssqlUpdateUpdateMethodType string const ( - SourceMssqlUpdateReplicationMethodTypeSourceMssqlUpdateReplicationMethodStandard SourceMssqlUpdateReplicationMethodType = "source-mssql-update_Replication Method_Standard" - SourceMssqlUpdateReplicationMethodTypeSourceMssqlUpdateReplicationMethodLogicalReplicationCDC SourceMssqlUpdateReplicationMethodType = "source-mssql-update_Replication Method_Logical Replication (CDC)" + SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateUpdateMethodType = "source-mssql-update_Update Method_Read Changes using Change Data Capture (CDC)" + SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateUpdateMethodType = "source-mssql-update_Update Method_Scan Changes with User Defined Cursor" ) -type SourceMssqlUpdateReplicationMethod struct { - SourceMssqlUpdateReplicationMethodStandard *SourceMssqlUpdateReplicationMethodStandard - SourceMssqlUpdateReplicationMethodLogicalReplicationCDC *SourceMssqlUpdateReplicationMethodLogicalReplicationCDC +type SourceMssqlUpdateUpdateMethod struct { + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC *SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC + SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor - Type SourceMssqlUpdateReplicationMethodType + Type SourceMssqlUpdateUpdateMethodType } -func CreateSourceMssqlUpdateReplicationMethodSourceMssqlUpdateReplicationMethodStandard(sourceMssqlUpdateReplicationMethodStandard SourceMssqlUpdateReplicationMethodStandard) SourceMssqlUpdateReplicationMethod { - typ := SourceMssqlUpdateReplicationMethodTypeSourceMssqlUpdateReplicationMethodStandard +func CreateSourceMssqlUpdateUpdateMethodSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC(sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC) SourceMssqlUpdateUpdateMethod { + typ := SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC - return SourceMssqlUpdateReplicationMethod{ - SourceMssqlUpdateReplicationMethodStandard: &sourceMssqlUpdateReplicationMethodStandard, + return SourceMssqlUpdateUpdateMethod{ + SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC: &sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC, Type: typ, } } -func CreateSourceMssqlUpdateReplicationMethodSourceMssqlUpdateReplicationMethodLogicalReplicationCDC(sourceMssqlUpdateReplicationMethodLogicalReplicationCDC SourceMssqlUpdateReplicationMethodLogicalReplicationCDC) SourceMssqlUpdateReplicationMethod { - typ := 
SourceMssqlUpdateReplicationMethodTypeSourceMssqlUpdateReplicationMethodLogicalReplicationCDC +func CreateSourceMssqlUpdateUpdateMethodSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor(sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) SourceMssqlUpdateUpdateMethod { + typ := SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor - return SourceMssqlUpdateReplicationMethod{ - SourceMssqlUpdateReplicationMethodLogicalReplicationCDC: &sourceMssqlUpdateReplicationMethodLogicalReplicationCDC, + return SourceMssqlUpdateUpdateMethod{ + SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor: &sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor, Type: typ, } } -func (u *SourceMssqlUpdateReplicationMethod) UnmarshalJSON(data []byte) error { +func (u *SourceMssqlUpdateUpdateMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMssqlUpdateReplicationMethodStandard := new(SourceMssqlUpdateReplicationMethodStandard) + sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMssqlUpdateReplicationMethodStandard); err == nil { - u.SourceMssqlUpdateReplicationMethodStandard = sourceMssqlUpdateReplicationMethodStandard - u.Type = SourceMssqlUpdateReplicationMethodTypeSourceMssqlUpdateReplicationMethodStandard + if err := d.Decode(&sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil { + u.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor + u.Type = SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor return nil } - sourceMssqlUpdateReplicationMethodLogicalReplicationCDC := new(SourceMssqlUpdateReplicationMethodLogicalReplicationCDC) + sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC := new(SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMssqlUpdateReplicationMethodLogicalReplicationCDC); err == nil { - u.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC = sourceMssqlUpdateReplicationMethodLogicalReplicationCDC - u.Type = SourceMssqlUpdateReplicationMethodTypeSourceMssqlUpdateReplicationMethodLogicalReplicationCDC + if err := d.Decode(&sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC); err == nil { + u.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC = sourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC + u.Type = SourceMssqlUpdateUpdateMethodTypeSourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC return nil } return errors.New("could not unmarshal into supported union types") } -func (u SourceMssqlUpdateReplicationMethod) MarshalJSON() ([]byte, error) { - if u.SourceMssqlUpdateReplicationMethodStandard != nil { - return json.Marshal(u.SourceMssqlUpdateReplicationMethodStandard) +func (u SourceMssqlUpdateUpdateMethod) MarshalJSON() ([]byte, error) { + if u.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { + return json.Marshal(u.SourceMssqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) } - if u.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC != nil { - return 
json.Marshal(u.SourceMssqlUpdateReplicationMethodLogicalReplicationCDC) + if u.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC != nil { + return json.Marshal(u.SourceMssqlUpdateUpdateMethodReadChangesUsingChangeDataCaptureCDC) } return nil, nil @@ -537,8 +537,8 @@ type SourceMssqlUpdate struct { Password *string `json:"password,omitempty"` // The port of the database. Port int64 `json:"port"` - // The replication method used for extracting data from the database. STANDARD replication requires no setup on the DB side but will not be able to represent deletions incrementally. CDC uses {TBC} to detect inserts, updates, and deletes. This needs to be configured on the source database itself. - ReplicationMethod *SourceMssqlUpdateReplicationMethod `json:"replication_method,omitempty"` + // Configures how data is extracted from the database. + ReplicationMethod *SourceMssqlUpdateUpdateMethod `json:"replication_method,omitempty"` // The list of schemas to sync from. Defaults to user. Case sensitive. Schemas []string `json:"schemas,omitempty"` // The encryption method which is used when communicating with the database. diff --git a/internal/sdk/pkg/models/shared/sourcemysql.go b/internal/sdk/pkg/models/shared/sourcemysql.go index e9ab73f51..43b166b83 100755 --- a/internal/sdk/pkg/models/shared/sourcemysql.go +++ b/internal/sdk/pkg/models/shared/sourcemysql.go @@ -106,21 +106,21 @@ func CreateSourceMysqlUpdateMethodSourceMysqlUpdateMethodScanChangesWithUserDefi func (u *SourceMysqlUpdateMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC := new(SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC) + sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC); err == nil { - u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC = sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC - u.Type = SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC + if err := d.Decode(&sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor); err == nil { + u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor = sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor + u.Type = SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodScanChangesWithUserDefinedCursor return nil } - sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor) + sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC := new(SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor); err == nil { - u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor = sourceMysqlUpdateMethodScanChangesWithUserDefinedCursor - u.Type = SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodScanChangesWithUserDefinedCursor + if err := d.Decode(&sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC); err == nil { + u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC = sourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC + u.Type = SourceMysqlUpdateMethodTypeSourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC return nil } @@ -128,14 +128,14 @@ func (u *SourceMysqlUpdateMethod) UnmarshalJSON(data []byte) error { } func (u SourceMysqlUpdateMethod) MarshalJSON() 
([]byte, error) { - if u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC != nil { - return json.Marshal(u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC) - } - if u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor != nil { return json.Marshal(u.SourceMysqlUpdateMethodScanChangesWithUserDefinedCursor) } + if u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC != nil { + return json.Marshal(u.SourceMysqlUpdateMethodReadChangesUsingBinaryLogCDC) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcemysqlupdate.go b/internal/sdk/pkg/models/shared/sourcemysqlupdate.go index 38e0c7ee9..4a2b8d84b 100755 --- a/internal/sdk/pkg/models/shared/sourcemysqlupdate.go +++ b/internal/sdk/pkg/models/shared/sourcemysqlupdate.go @@ -106,21 +106,21 @@ func CreateSourceMysqlUpdateUpdateMethodSourceMysqlUpdateUpdateMethodScanChanges func (u *SourceMysqlUpdateUpdateMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC := new(SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC) + sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC); err == nil { - u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC = sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC - u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC + if err := d.Decode(&sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil { + u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor + u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor return nil } - sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor := new(SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) + sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC := new(SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil { - u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor - u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor + if err := d.Decode(&sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC); err == nil { + u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC = sourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC + u.Type = SourceMysqlUpdateUpdateMethodTypeSourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC return nil } @@ -128,14 +128,14 @@ func (u *SourceMysqlUpdateUpdateMethod) UnmarshalJSON(data []byte) error { } func (u SourceMysqlUpdateUpdateMethod) MarshalJSON() ([]byte, error) { - if u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC != nil { - return json.Marshal(u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC) - } - if u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { return json.Marshal(u.SourceMysqlUpdateUpdateMethodScanChangesWithUserDefinedCursor) } + if u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC != nil { + return 
json.Marshal(u.SourceMysqlUpdateUpdateMethodReadChangesUsingBinaryLogCDC) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcenotion.go b/internal/sdk/pkg/models/shared/sourcenotion.go index 4a3bd388f..1a8b43d86 100755 --- a/internal/sdk/pkg/models/shared/sourcenotion.go +++ b/internal/sdk/pkg/models/shared/sourcenotion.go @@ -111,21 +111,21 @@ func CreateSourceNotionAuthenticateUsingSourceNotionAuthenticateUsingAccessToken func (u *SourceNotionAuthenticateUsing) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceNotionAuthenticateUsingOAuth20 := new(SourceNotionAuthenticateUsingOAuth20) + sourceNotionAuthenticateUsingAccessToken := new(SourceNotionAuthenticateUsingAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceNotionAuthenticateUsingOAuth20); err == nil { - u.SourceNotionAuthenticateUsingOAuth20 = sourceNotionAuthenticateUsingOAuth20 - u.Type = SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingOAuth20 + if err := d.Decode(&sourceNotionAuthenticateUsingAccessToken); err == nil { + u.SourceNotionAuthenticateUsingAccessToken = sourceNotionAuthenticateUsingAccessToken + u.Type = SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingAccessToken return nil } - sourceNotionAuthenticateUsingAccessToken := new(SourceNotionAuthenticateUsingAccessToken) + sourceNotionAuthenticateUsingOAuth20 := new(SourceNotionAuthenticateUsingOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceNotionAuthenticateUsingAccessToken); err == nil { - u.SourceNotionAuthenticateUsingAccessToken = sourceNotionAuthenticateUsingAccessToken - u.Type = SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingAccessToken + if err := d.Decode(&sourceNotionAuthenticateUsingOAuth20); err == nil { + u.SourceNotionAuthenticateUsingOAuth20 = sourceNotionAuthenticateUsingOAuth20 + u.Type = SourceNotionAuthenticateUsingTypeSourceNotionAuthenticateUsingOAuth20 return nil } @@ -133,14 +133,14 @@ func (u *SourceNotionAuthenticateUsing) UnmarshalJSON(data []byte) error { } func (u SourceNotionAuthenticateUsing) MarshalJSON() ([]byte, error) { - if u.SourceNotionAuthenticateUsingOAuth20 != nil { - return json.Marshal(u.SourceNotionAuthenticateUsingOAuth20) - } - if u.SourceNotionAuthenticateUsingAccessToken != nil { return json.Marshal(u.SourceNotionAuthenticateUsingAccessToken) } + if u.SourceNotionAuthenticateUsingOAuth20 != nil { + return json.Marshal(u.SourceNotionAuthenticateUsingOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcenotionupdate.go b/internal/sdk/pkg/models/shared/sourcenotionupdate.go index 2aebefee1..f424fe1d5 100755 --- a/internal/sdk/pkg/models/shared/sourcenotionupdate.go +++ b/internal/sdk/pkg/models/shared/sourcenotionupdate.go @@ -111,21 +111,21 @@ func CreateSourceNotionUpdateAuthenticateUsingSourceNotionUpdateAuthenticateUsin func (u *SourceNotionUpdateAuthenticateUsing) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceNotionUpdateAuthenticateUsingOAuth20 := new(SourceNotionUpdateAuthenticateUsingOAuth20) + sourceNotionUpdateAuthenticateUsingAccessToken := new(SourceNotionUpdateAuthenticateUsingAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceNotionUpdateAuthenticateUsingOAuth20); err == nil { - u.SourceNotionUpdateAuthenticateUsingOAuth20 = sourceNotionUpdateAuthenticateUsingOAuth20 - u.Type = 
SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingOAuth20 + if err := d.Decode(&sourceNotionUpdateAuthenticateUsingAccessToken); err == nil { + u.SourceNotionUpdateAuthenticateUsingAccessToken = sourceNotionUpdateAuthenticateUsingAccessToken + u.Type = SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingAccessToken return nil } - sourceNotionUpdateAuthenticateUsingAccessToken := new(SourceNotionUpdateAuthenticateUsingAccessToken) + sourceNotionUpdateAuthenticateUsingOAuth20 := new(SourceNotionUpdateAuthenticateUsingOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceNotionUpdateAuthenticateUsingAccessToken); err == nil { - u.SourceNotionUpdateAuthenticateUsingAccessToken = sourceNotionUpdateAuthenticateUsingAccessToken - u.Type = SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingAccessToken + if err := d.Decode(&sourceNotionUpdateAuthenticateUsingOAuth20); err == nil { + u.SourceNotionUpdateAuthenticateUsingOAuth20 = sourceNotionUpdateAuthenticateUsingOAuth20 + u.Type = SourceNotionUpdateAuthenticateUsingTypeSourceNotionUpdateAuthenticateUsingOAuth20 return nil } @@ -133,14 +133,14 @@ func (u *SourceNotionUpdateAuthenticateUsing) UnmarshalJSON(data []byte) error { } func (u SourceNotionUpdateAuthenticateUsing) MarshalJSON() ([]byte, error) { - if u.SourceNotionUpdateAuthenticateUsingOAuth20 != nil { - return json.Marshal(u.SourceNotionUpdateAuthenticateUsingOAuth20) - } - if u.SourceNotionUpdateAuthenticateUsingAccessToken != nil { return json.Marshal(u.SourceNotionUpdateAuthenticateUsingAccessToken) } + if u.SourceNotionUpdateAuthenticateUsingOAuth20 != nil { + return json.Marshal(u.SourceNotionUpdateAuthenticateUsingOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceokta.go b/internal/sdk/pkg/models/shared/sourceokta.go index e1d770358..b7ac00a18 100755 --- a/internal/sdk/pkg/models/shared/sourceokta.go +++ b/internal/sdk/pkg/models/shared/sourceokta.go @@ -108,21 +108,21 @@ func CreateSourceOktaAuthorizationMethodSourceOktaAuthorizationMethodAPIToken(so func (u *SourceOktaAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceOktaAuthorizationMethodOAuth20 := new(SourceOktaAuthorizationMethodOAuth20) + sourceOktaAuthorizationMethodAPIToken := new(SourceOktaAuthorizationMethodAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceOktaAuthorizationMethodOAuth20); err == nil { - u.SourceOktaAuthorizationMethodOAuth20 = sourceOktaAuthorizationMethodOAuth20 - u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodOAuth20 + if err := d.Decode(&sourceOktaAuthorizationMethodAPIToken); err == nil { + u.SourceOktaAuthorizationMethodAPIToken = sourceOktaAuthorizationMethodAPIToken + u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodAPIToken return nil } - sourceOktaAuthorizationMethodAPIToken := new(SourceOktaAuthorizationMethodAPIToken) + sourceOktaAuthorizationMethodOAuth20 := new(SourceOktaAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceOktaAuthorizationMethodAPIToken); err == nil { - u.SourceOktaAuthorizationMethodAPIToken = sourceOktaAuthorizationMethodAPIToken - u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodAPIToken + if err := d.Decode(&sourceOktaAuthorizationMethodOAuth20); err == nil { + 
u.SourceOktaAuthorizationMethodOAuth20 = sourceOktaAuthorizationMethodOAuth20 + u.Type = SourceOktaAuthorizationMethodTypeSourceOktaAuthorizationMethodOAuth20 return nil } @@ -130,14 +130,14 @@ func (u *SourceOktaAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceOktaAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceOktaAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceOktaAuthorizationMethodOAuth20) - } - if u.SourceOktaAuthorizationMethodAPIToken != nil { return json.Marshal(u.SourceOktaAuthorizationMethodAPIToken) } + if u.SourceOktaAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceOktaAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceoktaupdate.go b/internal/sdk/pkg/models/shared/sourceoktaupdate.go index 9bd777ba1..df93dccc7 100755 --- a/internal/sdk/pkg/models/shared/sourceoktaupdate.go +++ b/internal/sdk/pkg/models/shared/sourceoktaupdate.go @@ -108,21 +108,21 @@ func CreateSourceOktaUpdateAuthorizationMethodSourceOktaUpdateAuthorizationMetho func (u *SourceOktaUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceOktaUpdateAuthorizationMethodOAuth20 := new(SourceOktaUpdateAuthorizationMethodOAuth20) + sourceOktaUpdateAuthorizationMethodAPIToken := new(SourceOktaUpdateAuthorizationMethodAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceOktaUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceOktaUpdateAuthorizationMethodOAuth20 = sourceOktaUpdateAuthorizationMethodOAuth20 - u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceOktaUpdateAuthorizationMethodAPIToken); err == nil { + u.SourceOktaUpdateAuthorizationMethodAPIToken = sourceOktaUpdateAuthorizationMethodAPIToken + u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodAPIToken return nil } - sourceOktaUpdateAuthorizationMethodAPIToken := new(SourceOktaUpdateAuthorizationMethodAPIToken) + sourceOktaUpdateAuthorizationMethodOAuth20 := new(SourceOktaUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceOktaUpdateAuthorizationMethodAPIToken); err == nil { - u.SourceOktaUpdateAuthorizationMethodAPIToken = sourceOktaUpdateAuthorizationMethodAPIToken - u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodAPIToken + if err := d.Decode(&sourceOktaUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceOktaUpdateAuthorizationMethodOAuth20 = sourceOktaUpdateAuthorizationMethodOAuth20 + u.Type = SourceOktaUpdateAuthorizationMethodTypeSourceOktaUpdateAuthorizationMethodOAuth20 return nil } @@ -130,14 +130,14 @@ func (u *SourceOktaUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceOktaUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceOktaUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceOktaUpdateAuthorizationMethodOAuth20) - } - if u.SourceOktaUpdateAuthorizationMethodAPIToken != nil { return json.Marshal(u.SourceOktaUpdateAuthorizationMethodAPIToken) } + if u.SourceOktaUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceOktaUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceopenweather.go b/internal/sdk/pkg/models/shared/sourceopenweather.go deleted file mode 100755 index 
01d195e0a..000000000 --- a/internal/sdk/pkg/models/shared/sourceopenweather.go +++ /dev/null @@ -1,246 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -import ( - "encoding/json" - "fmt" -) - -// SourceOpenweatherLanguage - You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. -type SourceOpenweatherLanguage string - -const ( - SourceOpenweatherLanguageAf SourceOpenweatherLanguage = "af" - SourceOpenweatherLanguageAl SourceOpenweatherLanguage = "al" - SourceOpenweatherLanguageAr SourceOpenweatherLanguage = "ar" - SourceOpenweatherLanguageAz SourceOpenweatherLanguage = "az" - SourceOpenweatherLanguageBg SourceOpenweatherLanguage = "bg" - SourceOpenweatherLanguageCa SourceOpenweatherLanguage = "ca" - SourceOpenweatherLanguageCz SourceOpenweatherLanguage = "cz" - SourceOpenweatherLanguageDa SourceOpenweatherLanguage = "da" - SourceOpenweatherLanguageDe SourceOpenweatherLanguage = "de" - SourceOpenweatherLanguageEl SourceOpenweatherLanguage = "el" - SourceOpenweatherLanguageEn SourceOpenweatherLanguage = "en" - SourceOpenweatherLanguageEu SourceOpenweatherLanguage = "eu" - SourceOpenweatherLanguageFa SourceOpenweatherLanguage = "fa" - SourceOpenweatherLanguageFi SourceOpenweatherLanguage = "fi" - SourceOpenweatherLanguageFr SourceOpenweatherLanguage = "fr" - SourceOpenweatherLanguageGl SourceOpenweatherLanguage = "gl" - SourceOpenweatherLanguageHe SourceOpenweatherLanguage = "he" - SourceOpenweatherLanguageHi SourceOpenweatherLanguage = "hi" - SourceOpenweatherLanguageHr SourceOpenweatherLanguage = "hr" - SourceOpenweatherLanguageHu SourceOpenweatherLanguage = "hu" - SourceOpenweatherLanguageID SourceOpenweatherLanguage = "id" - SourceOpenweatherLanguageIt SourceOpenweatherLanguage = "it" - SourceOpenweatherLanguageJa SourceOpenweatherLanguage = "ja" - SourceOpenweatherLanguageKr SourceOpenweatherLanguage = "kr" - SourceOpenweatherLanguageLa SourceOpenweatherLanguage = "la" - SourceOpenweatherLanguageLt SourceOpenweatherLanguage = "lt" - SourceOpenweatherLanguageMk SourceOpenweatherLanguage = "mk" - SourceOpenweatherLanguageNo SourceOpenweatherLanguage = "no" - SourceOpenweatherLanguageNl SourceOpenweatherLanguage = "nl" - SourceOpenweatherLanguagePl SourceOpenweatherLanguage = "pl" - SourceOpenweatherLanguagePt SourceOpenweatherLanguage = "pt" - SourceOpenweatherLanguagePtBr SourceOpenweatherLanguage = "pt_br" - SourceOpenweatherLanguageRo SourceOpenweatherLanguage = "ro" - SourceOpenweatherLanguageRu SourceOpenweatherLanguage = "ru" - SourceOpenweatherLanguageSv SourceOpenweatherLanguage = "sv" - SourceOpenweatherLanguageSe SourceOpenweatherLanguage = "se" - SourceOpenweatherLanguageSk SourceOpenweatherLanguage = "sk" - SourceOpenweatherLanguageSl SourceOpenweatherLanguage = "sl" - SourceOpenweatherLanguageSp SourceOpenweatherLanguage = "sp" - SourceOpenweatherLanguageEs SourceOpenweatherLanguage = "es" - SourceOpenweatherLanguageSr SourceOpenweatherLanguage = "sr" - SourceOpenweatherLanguageTh SourceOpenweatherLanguage = "th" - SourceOpenweatherLanguageTr SourceOpenweatherLanguage = "tr" - SourceOpenweatherLanguageUa SourceOpenweatherLanguage = "ua" - SourceOpenweatherLanguageUk SourceOpenweatherLanguage = "uk" - SourceOpenweatherLanguageVi SourceOpenweatherLanguage = "vi" - SourceOpenweatherLanguageZhCn SourceOpenweatherLanguage = "zh_cn" - SourceOpenweatherLanguageZhTw SourceOpenweatherLanguage = "zh_tw" - 
SourceOpenweatherLanguageZu SourceOpenweatherLanguage = "zu" -) - -func (e SourceOpenweatherLanguage) ToPointer() *SourceOpenweatherLanguage { - return &e -} - -func (e *SourceOpenweatherLanguage) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "af": - fallthrough - case "al": - fallthrough - case "ar": - fallthrough - case "az": - fallthrough - case "bg": - fallthrough - case "ca": - fallthrough - case "cz": - fallthrough - case "da": - fallthrough - case "de": - fallthrough - case "el": - fallthrough - case "en": - fallthrough - case "eu": - fallthrough - case "fa": - fallthrough - case "fi": - fallthrough - case "fr": - fallthrough - case "gl": - fallthrough - case "he": - fallthrough - case "hi": - fallthrough - case "hr": - fallthrough - case "hu": - fallthrough - case "id": - fallthrough - case "it": - fallthrough - case "ja": - fallthrough - case "kr": - fallthrough - case "la": - fallthrough - case "lt": - fallthrough - case "mk": - fallthrough - case "no": - fallthrough - case "nl": - fallthrough - case "pl": - fallthrough - case "pt": - fallthrough - case "pt_br": - fallthrough - case "ro": - fallthrough - case "ru": - fallthrough - case "sv": - fallthrough - case "se": - fallthrough - case "sk": - fallthrough - case "sl": - fallthrough - case "sp": - fallthrough - case "es": - fallthrough - case "sr": - fallthrough - case "th": - fallthrough - case "tr": - fallthrough - case "ua": - fallthrough - case "uk": - fallthrough - case "vi": - fallthrough - case "zh_cn": - fallthrough - case "zh_tw": - fallthrough - case "zu": - *e = SourceOpenweatherLanguage(v) - return nil - default: - return fmt.Errorf("invalid value for SourceOpenweatherLanguage: %v", v) - } -} - -type SourceOpenweatherOpenweather string - -const ( - SourceOpenweatherOpenweatherOpenweather SourceOpenweatherOpenweather = "openweather" -) - -func (e SourceOpenweatherOpenweather) ToPointer() *SourceOpenweatherOpenweather { - return &e -} - -func (e *SourceOpenweatherOpenweather) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "openweather": - *e = SourceOpenweatherOpenweather(v) - return nil - default: - return fmt.Errorf("invalid value for SourceOpenweatherOpenweather: %v", v) - } -} - -// SourceOpenweatherUnits - Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. -type SourceOpenweatherUnits string - -const ( - SourceOpenweatherUnitsStandard SourceOpenweatherUnits = "standard" - SourceOpenweatherUnitsMetric SourceOpenweatherUnits = "metric" - SourceOpenweatherUnitsImperial SourceOpenweatherUnits = "imperial" -) - -func (e SourceOpenweatherUnits) ToPointer() *SourceOpenweatherUnits { - return &e -} - -func (e *SourceOpenweatherUnits) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "standard": - fallthrough - case "metric": - fallthrough - case "imperial": - *e = SourceOpenweatherUnits(v) - return nil - default: - return fmt.Errorf("invalid value for SourceOpenweatherUnits: %v", v) - } -} - -type SourceOpenweather struct { - // Your OpenWeather API Key. See here. The key is case sensitive. - Appid string `json:"appid"` - // You can use lang parameter to get the output in your language. The contents of the description field will be translated. 
See here for the list of supported languages. - Lang *SourceOpenweatherLanguage `json:"lang,omitempty"` - // Latitude for which you want to get weather condition from. (min -90, max 90) - Lat string `json:"lat"` - // Longitude for which you want to get weather condition from. (min -180, max 180) - Lon string `json:"lon"` - SourceType SourceOpenweatherOpenweather `json:"sourceType"` - // Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. - Units *SourceOpenweatherUnits `json:"units,omitempty"` -} diff --git a/internal/sdk/pkg/models/shared/sourceopenweathercreaterequest.go b/internal/sdk/pkg/models/shared/sourceopenweathercreaterequest.go deleted file mode 100755 index 6fa19f8cb..000000000 --- a/internal/sdk/pkg/models/shared/sourceopenweathercreaterequest.go +++ /dev/null @@ -1,11 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -type SourceOpenweatherCreateRequest struct { - Configuration SourceOpenweather `json:"configuration"` - Name string `json:"name"` - // Optional secretID obtained through the public API OAuth redirect flow. - SecretID *string `json:"secretId,omitempty"` - WorkspaceID string `json:"workspaceId"` -} diff --git a/internal/sdk/pkg/models/shared/sourceopenweatherupdate.go b/internal/sdk/pkg/models/shared/sourceopenweatherupdate.go deleted file mode 100755 index e8192fec7..000000000 --- a/internal/sdk/pkg/models/shared/sourceopenweatherupdate.go +++ /dev/null @@ -1,221 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -import ( - "encoding/json" - "fmt" -) - -// SourceOpenweatherUpdateLanguage - You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. 
-type SourceOpenweatherUpdateLanguage string - -const ( - SourceOpenweatherUpdateLanguageAf SourceOpenweatherUpdateLanguage = "af" - SourceOpenweatherUpdateLanguageAl SourceOpenweatherUpdateLanguage = "al" - SourceOpenweatherUpdateLanguageAr SourceOpenweatherUpdateLanguage = "ar" - SourceOpenweatherUpdateLanguageAz SourceOpenweatherUpdateLanguage = "az" - SourceOpenweatherUpdateLanguageBg SourceOpenweatherUpdateLanguage = "bg" - SourceOpenweatherUpdateLanguageCa SourceOpenweatherUpdateLanguage = "ca" - SourceOpenweatherUpdateLanguageCz SourceOpenweatherUpdateLanguage = "cz" - SourceOpenweatherUpdateLanguageDa SourceOpenweatherUpdateLanguage = "da" - SourceOpenweatherUpdateLanguageDe SourceOpenweatherUpdateLanguage = "de" - SourceOpenweatherUpdateLanguageEl SourceOpenweatherUpdateLanguage = "el" - SourceOpenweatherUpdateLanguageEn SourceOpenweatherUpdateLanguage = "en" - SourceOpenweatherUpdateLanguageEu SourceOpenweatherUpdateLanguage = "eu" - SourceOpenweatherUpdateLanguageFa SourceOpenweatherUpdateLanguage = "fa" - SourceOpenweatherUpdateLanguageFi SourceOpenweatherUpdateLanguage = "fi" - SourceOpenweatherUpdateLanguageFr SourceOpenweatherUpdateLanguage = "fr" - SourceOpenweatherUpdateLanguageGl SourceOpenweatherUpdateLanguage = "gl" - SourceOpenweatherUpdateLanguageHe SourceOpenweatherUpdateLanguage = "he" - SourceOpenweatherUpdateLanguageHi SourceOpenweatherUpdateLanguage = "hi" - SourceOpenweatherUpdateLanguageHr SourceOpenweatherUpdateLanguage = "hr" - SourceOpenweatherUpdateLanguageHu SourceOpenweatherUpdateLanguage = "hu" - SourceOpenweatherUpdateLanguageID SourceOpenweatherUpdateLanguage = "id" - SourceOpenweatherUpdateLanguageIt SourceOpenweatherUpdateLanguage = "it" - SourceOpenweatherUpdateLanguageJa SourceOpenweatherUpdateLanguage = "ja" - SourceOpenweatherUpdateLanguageKr SourceOpenweatherUpdateLanguage = "kr" - SourceOpenweatherUpdateLanguageLa SourceOpenweatherUpdateLanguage = "la" - SourceOpenweatherUpdateLanguageLt SourceOpenweatherUpdateLanguage = "lt" - SourceOpenweatherUpdateLanguageMk SourceOpenweatherUpdateLanguage = "mk" - SourceOpenweatherUpdateLanguageNo SourceOpenweatherUpdateLanguage = "no" - SourceOpenweatherUpdateLanguageNl SourceOpenweatherUpdateLanguage = "nl" - SourceOpenweatherUpdateLanguagePl SourceOpenweatherUpdateLanguage = "pl" - SourceOpenweatherUpdateLanguagePt SourceOpenweatherUpdateLanguage = "pt" - SourceOpenweatherUpdateLanguagePtBr SourceOpenweatherUpdateLanguage = "pt_br" - SourceOpenweatherUpdateLanguageRo SourceOpenweatherUpdateLanguage = "ro" - SourceOpenweatherUpdateLanguageRu SourceOpenweatherUpdateLanguage = "ru" - SourceOpenweatherUpdateLanguageSv SourceOpenweatherUpdateLanguage = "sv" - SourceOpenweatherUpdateLanguageSe SourceOpenweatherUpdateLanguage = "se" - SourceOpenweatherUpdateLanguageSk SourceOpenweatherUpdateLanguage = "sk" - SourceOpenweatherUpdateLanguageSl SourceOpenweatherUpdateLanguage = "sl" - SourceOpenweatherUpdateLanguageSp SourceOpenweatherUpdateLanguage = "sp" - SourceOpenweatherUpdateLanguageEs SourceOpenweatherUpdateLanguage = "es" - SourceOpenweatherUpdateLanguageSr SourceOpenweatherUpdateLanguage = "sr" - SourceOpenweatherUpdateLanguageTh SourceOpenweatherUpdateLanguage = "th" - SourceOpenweatherUpdateLanguageTr SourceOpenweatherUpdateLanguage = "tr" - SourceOpenweatherUpdateLanguageUa SourceOpenweatherUpdateLanguage = "ua" - SourceOpenweatherUpdateLanguageUk SourceOpenweatherUpdateLanguage = "uk" - SourceOpenweatherUpdateLanguageVi SourceOpenweatherUpdateLanguage = "vi" - SourceOpenweatherUpdateLanguageZhCn 
SourceOpenweatherUpdateLanguage = "zh_cn" - SourceOpenweatherUpdateLanguageZhTw SourceOpenweatherUpdateLanguage = "zh_tw" - SourceOpenweatherUpdateLanguageZu SourceOpenweatherUpdateLanguage = "zu" -) - -func (e SourceOpenweatherUpdateLanguage) ToPointer() *SourceOpenweatherUpdateLanguage { - return &e -} - -func (e *SourceOpenweatherUpdateLanguage) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "af": - fallthrough - case "al": - fallthrough - case "ar": - fallthrough - case "az": - fallthrough - case "bg": - fallthrough - case "ca": - fallthrough - case "cz": - fallthrough - case "da": - fallthrough - case "de": - fallthrough - case "el": - fallthrough - case "en": - fallthrough - case "eu": - fallthrough - case "fa": - fallthrough - case "fi": - fallthrough - case "fr": - fallthrough - case "gl": - fallthrough - case "he": - fallthrough - case "hi": - fallthrough - case "hr": - fallthrough - case "hu": - fallthrough - case "id": - fallthrough - case "it": - fallthrough - case "ja": - fallthrough - case "kr": - fallthrough - case "la": - fallthrough - case "lt": - fallthrough - case "mk": - fallthrough - case "no": - fallthrough - case "nl": - fallthrough - case "pl": - fallthrough - case "pt": - fallthrough - case "pt_br": - fallthrough - case "ro": - fallthrough - case "ru": - fallthrough - case "sv": - fallthrough - case "se": - fallthrough - case "sk": - fallthrough - case "sl": - fallthrough - case "sp": - fallthrough - case "es": - fallthrough - case "sr": - fallthrough - case "th": - fallthrough - case "tr": - fallthrough - case "ua": - fallthrough - case "uk": - fallthrough - case "vi": - fallthrough - case "zh_cn": - fallthrough - case "zh_tw": - fallthrough - case "zu": - *e = SourceOpenweatherUpdateLanguage(v) - return nil - default: - return fmt.Errorf("invalid value for SourceOpenweatherUpdateLanguage: %v", v) - } -} - -// SourceOpenweatherUpdateUnits - Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. -type SourceOpenweatherUpdateUnits string - -const ( - SourceOpenweatherUpdateUnitsStandard SourceOpenweatherUpdateUnits = "standard" - SourceOpenweatherUpdateUnitsMetric SourceOpenweatherUpdateUnits = "metric" - SourceOpenweatherUpdateUnitsImperial SourceOpenweatherUpdateUnits = "imperial" -) - -func (e SourceOpenweatherUpdateUnits) ToPointer() *SourceOpenweatherUpdateUnits { - return &e -} - -func (e *SourceOpenweatherUpdateUnits) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "standard": - fallthrough - case "metric": - fallthrough - case "imperial": - *e = SourceOpenweatherUpdateUnits(v) - return nil - default: - return fmt.Errorf("invalid value for SourceOpenweatherUpdateUnits: %v", v) - } -} - -type SourceOpenweatherUpdate struct { - // Your OpenWeather API Key. See here. The key is case sensitive. - Appid string `json:"appid"` - // You can use lang parameter to get the output in your language. The contents of the description field will be translated. See here for the list of supported languages. - Lang *SourceOpenweatherUpdateLanguage `json:"lang,omitempty"` - // Latitude for which you want to get weather condition from. (min -90, max 90) - Lat string `json:"lat"` - // Longitude for which you want to get weather condition from. 
(min -180, max 180) - Lon string `json:"lon"` - // Units of measurement. standard, metric and imperial units are available. If you do not use the units parameter, standard units will be applied by default. - Units *SourceOpenweatherUpdateUnits `json:"units,omitempty"` -} diff --git a/internal/sdk/pkg/models/shared/sourcepinterest.go b/internal/sdk/pkg/models/shared/sourcepinterest.go index bce74dd30..840fe1737 100755 --- a/internal/sdk/pkg/models/shared/sourcepinterest.go +++ b/internal/sdk/pkg/models/shared/sourcepinterest.go @@ -109,21 +109,21 @@ func CreateSourcePinterestAuthorizationMethodSourcePinterestAuthorizationMethodA func (u *SourcePinterestAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourcePinterestAuthorizationMethodOAuth20 := new(SourcePinterestAuthorizationMethodOAuth20) + sourcePinterestAuthorizationMethodAccessToken := new(SourcePinterestAuthorizationMethodAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePinterestAuthorizationMethodOAuth20); err == nil { - u.SourcePinterestAuthorizationMethodOAuth20 = sourcePinterestAuthorizationMethodOAuth20 - u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodOAuth20 + if err := d.Decode(&sourcePinterestAuthorizationMethodAccessToken); err == nil { + u.SourcePinterestAuthorizationMethodAccessToken = sourcePinterestAuthorizationMethodAccessToken + u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodAccessToken return nil } - sourcePinterestAuthorizationMethodAccessToken := new(SourcePinterestAuthorizationMethodAccessToken) + sourcePinterestAuthorizationMethodOAuth20 := new(SourcePinterestAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePinterestAuthorizationMethodAccessToken); err == nil { - u.SourcePinterestAuthorizationMethodAccessToken = sourcePinterestAuthorizationMethodAccessToken - u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodAccessToken + if err := d.Decode(&sourcePinterestAuthorizationMethodOAuth20); err == nil { + u.SourcePinterestAuthorizationMethodOAuth20 = sourcePinterestAuthorizationMethodOAuth20 + u.Type = SourcePinterestAuthorizationMethodTypeSourcePinterestAuthorizationMethodOAuth20 return nil } @@ -131,14 +131,14 @@ func (u *SourcePinterestAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourcePinterestAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourcePinterestAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourcePinterestAuthorizationMethodOAuth20) - } - if u.SourcePinterestAuthorizationMethodAccessToken != nil { return json.Marshal(u.SourcePinterestAuthorizationMethodAccessToken) } + if u.SourcePinterestAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourcePinterestAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcepinterestupdate.go b/internal/sdk/pkg/models/shared/sourcepinterestupdate.go index 961ba1c18..50d3ed725 100755 --- a/internal/sdk/pkg/models/shared/sourcepinterestupdate.go +++ b/internal/sdk/pkg/models/shared/sourcepinterestupdate.go @@ -109,21 +109,21 @@ func CreateSourcePinterestUpdateAuthorizationMethodSourcePinterestUpdateAuthoriz func (u *SourcePinterestUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourcePinterestUpdateAuthorizationMethodOAuth20 := new(SourcePinterestUpdateAuthorizationMethodOAuth20) 
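// Editorial aside (not part of the patch): the Pinterest hunks around this point reorder the
// union's UnmarshalJSON so the AccessToken variant is tried before the OAuth 2.0 variant. Each
// candidate is decoded with DisallowUnknownFields and the first clean decode wins; a likely
// motivation for trying the narrower variant first is that its keys can be a subset of the wider
// variant's fields. The standalone sketch below illustrates that with hypothetical tokenAuth and
// oauthAuth structs -- these are NOT the generated Pinterest types.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Narrow variant: carries only an access token.
type tokenAuth struct {
	AccessToken string `json:"access_token"`
}

// Wider variant: shares access_token and adds OAuth client fields.
type oauthAuth struct {
	AccessToken  string `json:"access_token,omitempty"`
	ClientID     string `json:"client_id,omitempty"`
	ClientSecret string `json:"client_secret,omitempty"`
}

// decodeStrict mirrors the generated pattern: reject any JSON key the target struct does not declare.
func decodeStrict(raw []byte, v interface{}) error {
	d := json.NewDecoder(bytes.NewReader(raw))
	d.DisallowUnknownFields()
	return d.Decode(v)
}

func main() {
	raw := []byte(`{"access_token":"tok"}`)

	// The token-only payload is accepted by BOTH structs...
	var wide oauthAuth
	fmt.Println("wide variant accepts it:", decodeStrict(raw, &wide) == nil)

	// ...so the narrower variant must be tried first for it to be selected,
	// which is what the reordered UnmarshalJSON now does.
	var narrow tokenAuth
	if decodeStrict(raw, &narrow) == nil {
		fmt.Printf("selected narrow variant: %+v\n", narrow)
	}
}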
+ sourcePinterestUpdateAuthorizationMethodAccessToken := new(SourcePinterestUpdateAuthorizationMethodAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePinterestUpdateAuthorizationMethodOAuth20); err == nil { - u.SourcePinterestUpdateAuthorizationMethodOAuth20 = sourcePinterestUpdateAuthorizationMethodOAuth20 - u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourcePinterestUpdateAuthorizationMethodAccessToken); err == nil { + u.SourcePinterestUpdateAuthorizationMethodAccessToken = sourcePinterestUpdateAuthorizationMethodAccessToken + u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodAccessToken return nil } - sourcePinterestUpdateAuthorizationMethodAccessToken := new(SourcePinterestUpdateAuthorizationMethodAccessToken) + sourcePinterestUpdateAuthorizationMethodOAuth20 := new(SourcePinterestUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePinterestUpdateAuthorizationMethodAccessToken); err == nil { - u.SourcePinterestUpdateAuthorizationMethodAccessToken = sourcePinterestUpdateAuthorizationMethodAccessToken - u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodAccessToken + if err := d.Decode(&sourcePinterestUpdateAuthorizationMethodOAuth20); err == nil { + u.SourcePinterestUpdateAuthorizationMethodOAuth20 = sourcePinterestUpdateAuthorizationMethodOAuth20 + u.Type = SourcePinterestUpdateAuthorizationMethodTypeSourcePinterestUpdateAuthorizationMethodOAuth20 return nil } @@ -131,14 +131,14 @@ func (u *SourcePinterestUpdateAuthorizationMethod) UnmarshalJSON(data []byte) er } func (u SourcePinterestUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourcePinterestUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourcePinterestUpdateAuthorizationMethodOAuth20) - } - if u.SourcePinterestUpdateAuthorizationMethodAccessToken != nil { return json.Marshal(u.SourcePinterestUpdateAuthorizationMethodAccessToken) } + if u.SourcePinterestUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourcePinterestUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcepostgres.go b/internal/sdk/pkg/models/shared/sourcepostgres.go index 6326dbe9e..6b4574ec4 100755 --- a/internal/sdk/pkg/models/shared/sourcepostgres.go +++ b/internal/sdk/pkg/models/shared/sourcepostgres.go @@ -9,48 +9,77 @@ import ( "fmt" ) -type SourcePostgresReplicationMethodStandardMethod string +type SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod string const ( - SourcePostgresReplicationMethodStandardMethodStandard SourcePostgresReplicationMethodStandardMethod = "Standard" + SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod = "Standard" ) -func (e SourcePostgresReplicationMethodStandardMethod) ToPointer() *SourcePostgresReplicationMethodStandardMethod { +func (e SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod { return &e } -func (e *SourcePostgresReplicationMethodStandardMethod) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error { var v string if err := 
json.Unmarshal(data, &v); err != nil { return err } switch v { case "Standard": - *e = SourcePostgresReplicationMethodStandardMethod(v) + *e = SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresReplicationMethodStandardMethod: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v) } } -// SourcePostgresReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. -type SourcePostgresReplicationMethodStandard struct { - Method SourcePostgresReplicationMethodStandardMethod `json:"method"` +// SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). +type SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor struct { + Method SourcePostgresUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"` } -// SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync. -type SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour string +type SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod string const ( - SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviourWhileReadingData SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "While reading Data" - SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "After loading Data in the destination" + SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethodXmin SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod = "Xmin" ) -func (e SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) ToPointer() *SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour { +func (e SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod) ToPointer() *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod { return &e } -func (e *SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "Xmin": + *e = SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod(v) + return nil + default: + return fmt.Errorf("invalid value for SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod: %v", v) + } +} + +// SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn - Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. 
+type SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn struct { + Method SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethod `json:"method"` +} + +// SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync. +type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour string + +const ( + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourWhileReadingData SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "While reading Data" + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "After loading Data in the destination" +) + +func (e SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) ToPointer() *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour { + return &e +} + +func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err @@ -59,71 +88,71 @@ func (e *SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) case "While reading Data": fallthrough case "After loading Data in the destination": - *e = SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(v) + *e = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour: %v", v) } } -type SourcePostgresReplicationMethodLogicalReplicationCDCMethod string +type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod string const ( - SourcePostgresReplicationMethodLogicalReplicationCDCMethodCdc SourcePostgresReplicationMethodLogicalReplicationCDCMethod = "CDC" + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethodCdc SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod = "CDC" ) -func (e SourcePostgresReplicationMethodLogicalReplicationCDCMethod) ToPointer() *SourcePostgresReplicationMethodLogicalReplicationCDCMethod { +func (e SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) ToPointer() *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod { return &e } -func (e *SourcePostgresReplicationMethodLogicalReplicationCDCMethod) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { case "CDC": - *e = SourcePostgresReplicationMethodLogicalReplicationCDCMethod(v) + *e = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresReplicationMethodLogicalReplicationCDCMethod: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod: %v", v) } } -// 
SourcePostgresReplicationMethodLogicalReplicationCDCPlugin - A logical decoding plugin installed on the PostgreSQL server. -type SourcePostgresReplicationMethodLogicalReplicationCDCPlugin string +// SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin - A logical decoding plugin installed on the PostgreSQL server. +type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin string const ( - SourcePostgresReplicationMethodLogicalReplicationCDCPluginPgoutput SourcePostgresReplicationMethodLogicalReplicationCDCPlugin = "pgoutput" + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPluginPgoutput SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin = "pgoutput" ) -func (e SourcePostgresReplicationMethodLogicalReplicationCDCPlugin) ToPointer() *SourcePostgresReplicationMethodLogicalReplicationCDCPlugin { +func (e SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) ToPointer() *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin { return &e } -func (e *SourcePostgresReplicationMethodLogicalReplicationCDCPlugin) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { case "pgoutput": - *e = SourcePostgresReplicationMethodLogicalReplicationCDCPlugin(v) + *e = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresReplicationMethodLogicalReplicationCDCPlugin: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin: %v", v) } } -// SourcePostgresReplicationMethodLogicalReplicationCDC - Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. -type SourcePostgresReplicationMethodLogicalReplicationCDC struct { +// SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. +type SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC struct { // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"` // Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync. - LsnCommitBehaviour *SourcePostgresReplicationMethodLogicalReplicationCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"` - Method SourcePostgresReplicationMethodLogicalReplicationCDCMethod `json:"method"` + LsnCommitBehaviour *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"` + Method SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCMethod `json:"method"` // A logical decoding plugin installed on the PostgreSQL server. 
- Plugin *SourcePostgresReplicationMethodLogicalReplicationCDCPlugin `json:"plugin,omitempty"` + Plugin *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin `json:"plugin,omitempty"` // A Postgres publication used for consuming changes. Read about publications and replication identities. Publication string `json:"publication"` // The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful. @@ -133,15 +162,15 @@ type SourcePostgresReplicationMethodLogicalReplicationCDC struct { AdditionalProperties interface{} `json:"-"` } -type _SourcePostgresReplicationMethodLogicalReplicationCDC SourcePostgresReplicationMethodLogicalReplicationCDC +type _SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC -func (c *SourcePostgresReplicationMethodLogicalReplicationCDC) UnmarshalJSON(bs []byte) error { - data := _SourcePostgresReplicationMethodLogicalReplicationCDC{} +func (c *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) UnmarshalJSON(bs []byte) error { + data := _SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC{} if err := json.Unmarshal(bs, &data); err != nil { return err } - *c = SourcePostgresReplicationMethodLogicalReplicationCDC(data) + *c = SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC(data) additionalFields := make(map[string]interface{}) @@ -161,9 +190,9 @@ func (c *SourcePostgresReplicationMethodLogicalReplicationCDC) UnmarshalJSON(bs return nil } -func (c SourcePostgresReplicationMethodLogicalReplicationCDC) MarshalJSON() ([]byte, error) { +func (c SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) MarshalJSON() ([]byte, error) { out := map[string]interface{}{} - bs, err := json.Marshal(_SourcePostgresReplicationMethodLogicalReplicationCDC(c)) + bs, err := json.Marshal(_SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC(c)) if err != nil { return nil, err } @@ -184,122 +213,93 @@ func (c SourcePostgresReplicationMethodLogicalReplicationCDC) MarshalJSON() ([]b return json.Marshal(out) } -type SourcePostgresReplicationMethodStandardXminMethod string +type SourcePostgresUpdateMethodType string const ( - SourcePostgresReplicationMethodStandardXminMethodXmin SourcePostgresReplicationMethodStandardXminMethod = "Xmin" + SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodType = "source-postgres_Update Method_Read Changes using Write-Ahead Log (CDC)" + SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateMethodType = "source-postgres_Update Method_Detect Changes with Xmin System Column" + SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateMethodType = "source-postgres_Update Method_Scan Changes with User Defined Cursor" ) -func (e SourcePostgresReplicationMethodStandardXminMethod) ToPointer() *SourcePostgresReplicationMethodStandardXminMethod { - return &e -} - -func (e *SourcePostgresReplicationMethodStandardXminMethod) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "Xmin": - *e = SourcePostgresReplicationMethodStandardXminMethod(v) - return nil - default: - return fmt.Errorf("invalid value for SourcePostgresReplicationMethodStandardXminMethod: %v", v) - } -} - -// SourcePostgresReplicationMethodStandardXmin - Xmin replication requires no 
setup on the DB side but will not be able to represent deletions incrementally. -type SourcePostgresReplicationMethodStandardXmin struct { - Method SourcePostgresReplicationMethodStandardXminMethod `json:"method"` -} - -type SourcePostgresReplicationMethodType string +type SourcePostgresUpdateMethod struct { + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC + SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn + SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor -const ( - SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodStandardXmin SourcePostgresReplicationMethodType = "source-postgres_Replication Method_Standard (Xmin)" - SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodLogicalReplicationCDC SourcePostgresReplicationMethodType = "source-postgres_Replication Method_Logical Replication (CDC)" - SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodStandard SourcePostgresReplicationMethodType = "source-postgres_Replication Method_Standard" -) - -type SourcePostgresReplicationMethod struct { - SourcePostgresReplicationMethodStandardXmin *SourcePostgresReplicationMethodStandardXmin - SourcePostgresReplicationMethodLogicalReplicationCDC *SourcePostgresReplicationMethodLogicalReplicationCDC - SourcePostgresReplicationMethodStandard *SourcePostgresReplicationMethodStandard - - Type SourcePostgresReplicationMethodType + Type SourcePostgresUpdateMethodType } -func CreateSourcePostgresReplicationMethodSourcePostgresReplicationMethodStandardXmin(sourcePostgresReplicationMethodStandardXmin SourcePostgresReplicationMethodStandardXmin) SourcePostgresReplicationMethod { - typ := SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodStandardXmin +func CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC(sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) SourcePostgresUpdateMethod { + typ := SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC - return SourcePostgresReplicationMethod{ - SourcePostgresReplicationMethodStandardXmin: &sourcePostgresReplicationMethodStandardXmin, + return SourcePostgresUpdateMethod{ + SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC: &sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC, Type: typ, } } -func CreateSourcePostgresReplicationMethodSourcePostgresReplicationMethodLogicalReplicationCDC(sourcePostgresReplicationMethodLogicalReplicationCDC SourcePostgresReplicationMethodLogicalReplicationCDC) SourcePostgresReplicationMethod { - typ := SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodLogicalReplicationCDC +func CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn(sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn) SourcePostgresUpdateMethod { + typ := SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn - return SourcePostgresReplicationMethod{ - SourcePostgresReplicationMethodLogicalReplicationCDC: &sourcePostgresReplicationMethodLogicalReplicationCDC, + return SourcePostgresUpdateMethod{ + SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn: 
&sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn, Type: typ, } } -func CreateSourcePostgresReplicationMethodSourcePostgresReplicationMethodStandard(sourcePostgresReplicationMethodStandard SourcePostgresReplicationMethodStandard) SourcePostgresReplicationMethod { - typ := SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodStandard +func CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor(sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor) SourcePostgresUpdateMethod { + typ := SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor - return SourcePostgresReplicationMethod{ - SourcePostgresReplicationMethodStandard: &sourcePostgresReplicationMethodStandard, - Type: typ, + return SourcePostgresUpdateMethod{ + SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor: &sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor, + Type: typ, } } -func (u *SourcePostgresReplicationMethod) UnmarshalJSON(data []byte) error { +func (u *SourcePostgresUpdateMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourcePostgresReplicationMethodStandardXmin := new(SourcePostgresReplicationMethodStandardXmin) + sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn := new(SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePostgresReplicationMethodStandardXmin); err == nil { - u.SourcePostgresReplicationMethodStandardXmin = sourcePostgresReplicationMethodStandardXmin - u.Type = SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodStandardXmin + if err := d.Decode(&sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn); err == nil { + u.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn = sourcePostgresUpdateMethodDetectChangesWithXminSystemColumn + u.Type = SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn return nil } - sourcePostgresReplicationMethodLogicalReplicationCDC := new(SourcePostgresReplicationMethodLogicalReplicationCDC) + sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor := new(SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePostgresReplicationMethodLogicalReplicationCDC); err == nil { - u.SourcePostgresReplicationMethodLogicalReplicationCDC = sourcePostgresReplicationMethodLogicalReplicationCDC - u.Type = SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodLogicalReplicationCDC + if err := d.Decode(&sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor); err == nil { + u.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor = sourcePostgresUpdateMethodScanChangesWithUserDefinedCursor + u.Type = SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodScanChangesWithUserDefinedCursor return nil } - sourcePostgresReplicationMethodStandard := new(SourcePostgresReplicationMethodStandard) + sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC := new(SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePostgresReplicationMethodStandard); err == nil { - u.SourcePostgresReplicationMethodStandard = sourcePostgresReplicationMethodStandard - u.Type = 
SourcePostgresReplicationMethodTypeSourcePostgresReplicationMethodStandard + if err := d.Decode(&sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC); err == nil { + u.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC = sourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC + u.Type = SourcePostgresUpdateMethodTypeSourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC return nil } return errors.New("could not unmarshal into supported union types") } -func (u SourcePostgresReplicationMethod) MarshalJSON() ([]byte, error) { - if u.SourcePostgresReplicationMethodStandardXmin != nil { - return json.Marshal(u.SourcePostgresReplicationMethodStandardXmin) +func (u SourcePostgresUpdateMethod) MarshalJSON() ([]byte, error) { + if u.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn != nil { + return json.Marshal(u.SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn) } - if u.SourcePostgresReplicationMethodLogicalReplicationCDC != nil { - return json.Marshal(u.SourcePostgresReplicationMethodLogicalReplicationCDC) + if u.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor != nil { + return json.Marshal(u.SourcePostgresUpdateMethodScanChangesWithUserDefinedCursor) } - if u.SourcePostgresReplicationMethodStandard != nil { - return json.Marshal(u.SourcePostgresReplicationMethodStandard) + if u.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC != nil { + return json.Marshal(u.SourcePostgresUpdateMethodReadChangesUsingWriteAheadLogCDC) } return nil, nil @@ -1179,8 +1179,8 @@ type SourcePostgres struct { Password *string `json:"password,omitempty"` // Port of the database. Port int64 `json:"port"` - // Replication method for extracting data from the database. - ReplicationMethod *SourcePostgresReplicationMethod `json:"replication_method,omitempty"` + // Configures how data is extracted from the database. + ReplicationMethod *SourcePostgresUpdateMethod `json:"replication_method,omitempty"` // The list of schemas (case sensitive) to sync from. Defaults to public. 
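// Editorial aside (not part of the patch): a minimal sketch of how the renamed update-method
// union is meant to be used, assuming the snippet sits next to the generated models in the same
// `shared` package. Only the Xmin variant is constructed here because all of its fields appear in
// this diff; the CDC variant has required fields (e.g. publication) not fully shown above.
package shared

import (
	"encoding/json"
	"fmt"
)

func exampleXminUpdateMethod() {
	// Wrap the Xmin option in the union via its generated constructor.
	method := CreateSourcePostgresUpdateMethodSourcePostgresUpdateMethodDetectChangesWithXminSystemColumn(
		SourcePostgresUpdateMethodDetectChangesWithXminSystemColumn{
			Method: SourcePostgresUpdateMethodDetectChangesWithXminSystemColumnMethodXmin,
		},
	)

	// The union's MarshalJSON emits only the populated variant, so the replication_method
	// field still serializes as {"method":"Xmin"} despite the Go-side rename.
	bs, err := json.Marshal(method)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(bs))

	// The value plugs into the source config exactly where the old
	// SourcePostgresReplicationMethod used to go.
	_ = SourcePostgres{ReplicationMethod: &method}
}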
Schemas []string `json:"schemas,omitempty"` SourceType SourcePostgresPostgres `json:"sourceType"` diff --git a/internal/sdk/pkg/models/shared/sourcepostgresupdate.go b/internal/sdk/pkg/models/shared/sourcepostgresupdate.go index 0331199a8..e7f92d960 100755 --- a/internal/sdk/pkg/models/shared/sourcepostgresupdate.go +++ b/internal/sdk/pkg/models/shared/sourcepostgresupdate.go @@ -9,48 +9,77 @@ import ( "fmt" ) -type SourcePostgresUpdateReplicationMethodStandardMethod string +type SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod string const ( - SourcePostgresUpdateReplicationMethodStandardMethodStandard SourcePostgresUpdateReplicationMethodStandardMethod = "Standard" + SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethodStandard SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod = "Standard" ) -func (e SourcePostgresUpdateReplicationMethodStandardMethod) ToPointer() *SourcePostgresUpdateReplicationMethodStandardMethod { +func (e SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) ToPointer() *SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod { return &e } -func (e *SourcePostgresUpdateReplicationMethodStandardMethod) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { case "Standard": - *e = SourcePostgresUpdateReplicationMethodStandardMethod(v) + *e = SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresUpdateReplicationMethodStandardMethod: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod: %v", v) } } -// SourcePostgresUpdateReplicationMethodStandard - Standard replication requires no setup on the DB side but will not be able to represent deletions incrementally. -type SourcePostgresUpdateReplicationMethodStandard struct { - Method SourcePostgresUpdateReplicationMethodStandardMethod `json:"method"` +// SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor - Incrementally detects new inserts and updates using the cursor column chosen when configuring a connection (e.g. created_at, updated_at). +type SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor struct { + Method SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursorMethod `json:"method"` } -// SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync. 
-type SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour string +type SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod string const ( - SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviourWhileReadingData SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "While reading Data" - SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour = "After loading Data in the destination" + SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethodXmin SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod = "Xmin" ) -func (e SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) ToPointer() *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour { +func (e SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod) ToPointer() *SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod { return &e } -func (e *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "Xmin": + *e = SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod(v) + return nil + default: + return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod: %v", v) + } +} + +// SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn - Recommended - Incrementally reads new inserts and updates via Postgres Xmin system column. Only recommended for tables up to 500GB. +type SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn struct { + Method SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumnMethod `json:"method"` +} + +// SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour - Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync. 
+type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour string + +const ( + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourWhileReadingData SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "While reading Data" + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviourAfterLoadingDataInTheDestination SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour = "After loading Data in the destination" +) + +func (e SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) ToPointer() *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour { + return &e +} + +func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err @@ -59,71 +88,71 @@ func (e *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBeha case "While reading Data": fallthrough case "After loading Data in the destination": - *e = SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour(v) + *e = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour: %v", v) } } -type SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod string +type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod string const ( - SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethodCdc SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod = "CDC" + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethodCdc SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod = "CDC" ) -func (e SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod) ToPointer() *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod { +func (e SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) ToPointer() *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod { return &e } -func (e *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { case "CDC": - *e = SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod(v) + *e = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod: %v", v) } } -// SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin - A logical decoding plugin installed on the PostgreSQL server. -type SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin string +// SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin - A logical decoding plugin installed on the PostgreSQL server. 
+type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin string const ( - SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPluginPgoutput SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin = "pgoutput" + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPluginPgoutput SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin = "pgoutput" ) -func (e SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin) ToPointer() *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin { +func (e SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) ToPointer() *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin { return &e } -func (e *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin) UnmarshalJSON(data []byte) error { +func (e *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { case "pgoutput": - *e = SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin(v) + *e = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin(v) return nil default: - return fmt.Errorf("invalid value for SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin: %v", v) + return fmt.Errorf("invalid value for SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin: %v", v) } } -// SourcePostgresUpdateReplicationMethodLogicalReplicationCDC - Logical replication uses the Postgres write-ahead log (WAL) to detect inserts, updates, and deletes. This needs to be configured on the source database itself. Only available on Postgres 10 and above. Read the docs. -type SourcePostgresUpdateReplicationMethodLogicalReplicationCDC struct { +// SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC - Recommended - Incrementally reads new inserts, updates, and deletes using the Postgres write-ahead log (WAL). This needs to be configured on the source database itself. Recommended for tables of any size. +type SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC struct { // The amount of time the connector will wait when it launches to determine if there is new data to sync or not. Defaults to 300 seconds. Valid range: 120 seconds to 1200 seconds. Read about initial waiting time. InitialWaitingSeconds *int64 `json:"initial_waiting_seconds,omitempty"` // Determines when Airbtye should flush the LSN of processed WAL logs in the source database. `After loading Data in the destination` is default. If `While reading Data` is selected, in case of a downstream failure (while loading data into the destination), next sync would result in a full sync. - LsnCommitBehaviour *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"` - Method SourcePostgresUpdateReplicationMethodLogicalReplicationCDCMethod `json:"method"` + LsnCommitBehaviour *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCLSNCommitBehaviour `json:"lsn_commit_behaviour,omitempty"` + Method SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCMethod `json:"method"` // A logical decoding plugin installed on the PostgreSQL server. 
- Plugin *SourcePostgresUpdateReplicationMethodLogicalReplicationCDCPlugin `json:"plugin,omitempty"` + Plugin *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDCPlugin `json:"plugin,omitempty"` // A Postgres publication used for consuming changes. Read about publications and replication identities. Publication string `json:"publication"` // The size of the internal queue. This may interfere with memory consumption and efficiency of the connector, please be careful. @@ -133,15 +162,15 @@ type SourcePostgresUpdateReplicationMethodLogicalReplicationCDC struct { AdditionalProperties interface{} `json:"-"` } -type _SourcePostgresUpdateReplicationMethodLogicalReplicationCDC SourcePostgresUpdateReplicationMethodLogicalReplicationCDC +type _SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC -func (c *SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) UnmarshalJSON(bs []byte) error { - data := _SourcePostgresUpdateReplicationMethodLogicalReplicationCDC{} +func (c *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) UnmarshalJSON(bs []byte) error { + data := _SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC{} if err := json.Unmarshal(bs, &data); err != nil { return err } - *c = SourcePostgresUpdateReplicationMethodLogicalReplicationCDC(data) + *c = SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(data) additionalFields := make(map[string]interface{}) @@ -161,9 +190,9 @@ func (c *SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) UnmarshalJS return nil } -func (c SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) MarshalJSON() ([]byte, error) { +func (c SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) MarshalJSON() ([]byte, error) { out := map[string]interface{}{} - bs, err := json.Marshal(_SourcePostgresUpdateReplicationMethodLogicalReplicationCDC(c)) + bs, err := json.Marshal(_SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(c)) if err != nil { return nil, err } @@ -184,122 +213,93 @@ func (c SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) MarshalJSON( return json.Marshal(out) } -type SourcePostgresUpdateReplicationMethodStandardXminMethod string - -const ( - SourcePostgresUpdateReplicationMethodStandardXminMethodXmin SourcePostgresUpdateReplicationMethodStandardXminMethod = "Xmin" -) - -func (e SourcePostgresUpdateReplicationMethodStandardXminMethod) ToPointer() *SourcePostgresUpdateReplicationMethodStandardXminMethod { - return &e -} - -func (e *SourcePostgresUpdateReplicationMethodStandardXminMethod) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "Xmin": - *e = SourcePostgresUpdateReplicationMethodStandardXminMethod(v) - return nil - default: - return fmt.Errorf("invalid value for SourcePostgresUpdateReplicationMethodStandardXminMethod: %v", v) - } -} - -// SourcePostgresUpdateReplicationMethodStandardXmin - Xmin replication requires no setup on the DB side but will not be able to represent deletions incrementally. 
-type SourcePostgresUpdateReplicationMethodStandardXmin struct { - Method SourcePostgresUpdateReplicationMethodStandardXminMethod `json:"method"` -} - -type SourcePostgresUpdateReplicationMethodType string +type SourcePostgresUpdateUpdateMethodType string const ( - SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodStandardXmin SourcePostgresUpdateReplicationMethodType = "source-postgres-update_Replication Method_Standard (Xmin)" - SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodLogicalReplicationCDC SourcePostgresUpdateReplicationMethodType = "source-postgres-update_Replication Method_Logical Replication (CDC)" - SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodStandard SourcePostgresUpdateReplicationMethodType = "source-postgres-update_Replication Method_Standard" + SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Update Method_Read Changes using Write-Ahead Log (CDC)" + SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Update Method_Detect Changes with Xmin System Column" + SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateUpdateMethodType = "source-postgres-update_Update Method_Scan Changes with User Defined Cursor" ) -type SourcePostgresUpdateReplicationMethod struct { - SourcePostgresUpdateReplicationMethodStandardXmin *SourcePostgresUpdateReplicationMethodStandardXmin - SourcePostgresUpdateReplicationMethodLogicalReplicationCDC *SourcePostgresUpdateReplicationMethodLogicalReplicationCDC - SourcePostgresUpdateReplicationMethodStandard *SourcePostgresUpdateReplicationMethodStandard +type SourcePostgresUpdateUpdateMethod struct { + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC *SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC + SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn *SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn + SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor *SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor - Type SourcePostgresUpdateReplicationMethodType + Type SourcePostgresUpdateUpdateMethodType } -func CreateSourcePostgresUpdateReplicationMethodSourcePostgresUpdateReplicationMethodStandardXmin(sourcePostgresUpdateReplicationMethodStandardXmin SourcePostgresUpdateReplicationMethodStandardXmin) SourcePostgresUpdateReplicationMethod { - typ := SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodStandardXmin +func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC(sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) SourcePostgresUpdateUpdateMethod { + typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC - return SourcePostgresUpdateReplicationMethod{ - SourcePostgresUpdateReplicationMethodStandardXmin: &sourcePostgresUpdateReplicationMethodStandardXmin, + return SourcePostgresUpdateUpdateMethod{ + SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC: &sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC, Type: typ, } } -func 
CreateSourcePostgresUpdateReplicationMethodSourcePostgresUpdateReplicationMethodLogicalReplicationCDC(sourcePostgresUpdateReplicationMethodLogicalReplicationCDC SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) SourcePostgresUpdateReplicationMethod { - typ := SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodLogicalReplicationCDC +func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn(sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn) SourcePostgresUpdateUpdateMethod { + typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn - return SourcePostgresUpdateReplicationMethod{ - SourcePostgresUpdateReplicationMethodLogicalReplicationCDC: &sourcePostgresUpdateReplicationMethodLogicalReplicationCDC, + return SourcePostgresUpdateUpdateMethod{ + SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn: &sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn, Type: typ, } } -func CreateSourcePostgresUpdateReplicationMethodSourcePostgresUpdateReplicationMethodStandard(sourcePostgresUpdateReplicationMethodStandard SourcePostgresUpdateReplicationMethodStandard) SourcePostgresUpdateReplicationMethod { - typ := SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodStandard +func CreateSourcePostgresUpdateUpdateMethodSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor(sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor) SourcePostgresUpdateUpdateMethod { + typ := SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor - return SourcePostgresUpdateReplicationMethod{ - SourcePostgresUpdateReplicationMethodStandard: &sourcePostgresUpdateReplicationMethodStandard, + return SourcePostgresUpdateUpdateMethod{ + SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor: &sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor, Type: typ, } } -func (u *SourcePostgresUpdateReplicationMethod) UnmarshalJSON(data []byte) error { +func (u *SourcePostgresUpdateUpdateMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourcePostgresUpdateReplicationMethodStandardXmin := new(SourcePostgresUpdateReplicationMethodStandardXmin) + sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn := new(SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePostgresUpdateReplicationMethodStandardXmin); err == nil { - u.SourcePostgresUpdateReplicationMethodStandardXmin = sourcePostgresUpdateReplicationMethodStandardXmin - u.Type = SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodStandardXmin + if err := d.Decode(&sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn); err == nil { + u.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn = sourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn + u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn return nil } - sourcePostgresUpdateReplicationMethodLogicalReplicationCDC := new(SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) + sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor := 
new(SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePostgresUpdateReplicationMethodLogicalReplicationCDC); err == nil { - u.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC = sourcePostgresUpdateReplicationMethodLogicalReplicationCDC - u.Type = SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodLogicalReplicationCDC + if err := d.Decode(&sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor); err == nil { + u.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor = sourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor + u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor return nil } - sourcePostgresUpdateReplicationMethodStandard := new(SourcePostgresUpdateReplicationMethodStandard) + sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC := new(SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourcePostgresUpdateReplicationMethodStandard); err == nil { - u.SourcePostgresUpdateReplicationMethodStandard = sourcePostgresUpdateReplicationMethodStandard - u.Type = SourcePostgresUpdateReplicationMethodTypeSourcePostgresUpdateReplicationMethodStandard + if err := d.Decode(&sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC); err == nil { + u.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC = sourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC + u.Type = SourcePostgresUpdateUpdateMethodTypeSourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC return nil } return errors.New("could not unmarshal into supported union types") } -func (u SourcePostgresUpdateReplicationMethod) MarshalJSON() ([]byte, error) { - if u.SourcePostgresUpdateReplicationMethodStandardXmin != nil { - return json.Marshal(u.SourcePostgresUpdateReplicationMethodStandardXmin) +func (u SourcePostgresUpdateUpdateMethod) MarshalJSON() ([]byte, error) { + if u.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn != nil { + return json.Marshal(u.SourcePostgresUpdateUpdateMethodDetectChangesWithXminSystemColumn) } - if u.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC != nil { - return json.Marshal(u.SourcePostgresUpdateReplicationMethodLogicalReplicationCDC) + if u.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor != nil { + return json.Marshal(u.SourcePostgresUpdateUpdateMethodScanChangesWithUserDefinedCursor) } - if u.SourcePostgresUpdateReplicationMethodStandard != nil { - return json.Marshal(u.SourcePostgresUpdateReplicationMethodStandard) + if u.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC != nil { + return json.Marshal(u.SourcePostgresUpdateUpdateMethodReadChangesUsingWriteAheadLogCDC) } return nil, nil @@ -1155,8 +1155,8 @@ type SourcePostgresUpdate struct { Password *string `json:"password,omitempty"` // Port of the database. Port int64 `json:"port"` - // Replication method for extracting data from the database. - ReplicationMethod *SourcePostgresUpdateReplicationMethod `json:"replication_method,omitempty"` + // Configures how data is extracted from the database. + ReplicationMethod *SourcePostgresUpdateUpdateMethod `json:"replication_method,omitempty"` // The list of schemas (case sensitive) to sync from. Defaults to public. 
Schemas []string `json:"schemas,omitempty"` // SSL connection modes. diff --git a/internal/sdk/pkg/models/shared/sourceposthog.go b/internal/sdk/pkg/models/shared/sourceposthog.go index bf718502b..046b777cb 100755 --- a/internal/sdk/pkg/models/shared/sourceposthog.go +++ b/internal/sdk/pkg/models/shared/sourceposthog.go @@ -36,8 +36,10 @@ type SourcePosthog struct { // API Key. See the docs for information on how to generate this key. APIKey string `json:"api_key"` // Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com). - BaseURL *string `json:"base_url,omitempty"` - SourceType SourcePosthogPosthog `json:"sourceType"` + BaseURL *string `json:"base_url,omitempty"` + // Set lower value in case of failing long running sync of events stream. + EventsTimeStep *int64 `json:"events_time_step,omitempty"` + SourceType SourcePosthogPosthog `json:"sourceType"` // The date from which you'd like to replicate the data. Any data before this date will not be replicated. StartDate time.Time `json:"start_date"` } diff --git a/internal/sdk/pkg/models/shared/sourceposthogupdate.go b/internal/sdk/pkg/models/shared/sourceposthogupdate.go index 5e5eb730f..e39684f56 100755 --- a/internal/sdk/pkg/models/shared/sourceposthogupdate.go +++ b/internal/sdk/pkg/models/shared/sourceposthogupdate.go @@ -11,6 +11,8 @@ type SourcePosthogUpdate struct { APIKey string `json:"api_key"` // Base PostHog url. Defaults to PostHog Cloud (https://app.posthog.com). BaseURL *string `json:"base_url,omitempty"` + // Set lower value in case of failing long running sync of events stream. + EventsTimeStep *int64 `json:"events_time_step,omitempty"` // The date from which you'd like to replicate the data. Any data before this date will not be replicated. StartDate time.Time `json:"start_date"` } diff --git a/internal/sdk/pkg/models/shared/sourcepublicapis.go b/internal/sdk/pkg/models/shared/sourcepublicapis.go deleted file mode 100755 index deef71d1c..000000000 --- a/internal/sdk/pkg/models/shared/sourcepublicapis.go +++ /dev/null @@ -1,36 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -import ( - "encoding/json" - "fmt" -) - -type SourcePublicApisPublicApis string - -const ( - SourcePublicApisPublicApisPublicApis SourcePublicApisPublicApis = "public-apis" -) - -func (e SourcePublicApisPublicApis) ToPointer() *SourcePublicApisPublicApis { - return &e -} - -func (e *SourcePublicApisPublicApis) UnmarshalJSON(data []byte) error { - var v string - if err := json.Unmarshal(data, &v); err != nil { - return err - } - switch v { - case "public-apis": - *e = SourcePublicApisPublicApis(v) - return nil - default: - return fmt.Errorf("invalid value for SourcePublicApisPublicApis: %v", v) - } -} - -type SourcePublicApis struct { - SourceType SourcePublicApisPublicApis `json:"sourceType"` -} diff --git a/internal/sdk/pkg/models/shared/sourcepublicapiscreaterequest.go b/internal/sdk/pkg/models/shared/sourcepublicapiscreaterequest.go deleted file mode 100755 index fae5a845a..000000000 --- a/internal/sdk/pkg/models/shared/sourcepublicapiscreaterequest.go +++ /dev/null @@ -1,11 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -type SourcePublicApisCreateRequest struct { - Configuration SourcePublicApis `json:"configuration"` - Name string `json:"name"` - // Optional secretID obtained through the public API OAuth redirect flow. 
- SecretID *string `json:"secretId,omitempty"` - WorkspaceID string `json:"workspaceId"` -} diff --git a/internal/sdk/pkg/models/shared/sourcepublicapisputrequest.go b/internal/sdk/pkg/models/shared/sourcepublicapisputrequest.go deleted file mode 100755 index cec613311..000000000 --- a/internal/sdk/pkg/models/shared/sourcepublicapisputrequest.go +++ /dev/null @@ -1,9 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -type SourcePublicApisPutRequest struct { - Configuration SourcePublicApisUpdate `json:"configuration"` - Name string `json:"name"` - WorkspaceID string `json:"workspaceId"` -} diff --git a/internal/sdk/pkg/models/shared/sourcepublicapisupdate.go b/internal/sdk/pkg/models/shared/sourcepublicapisupdate.go deleted file mode 100755 index 742399c48..000000000 --- a/internal/sdk/pkg/models/shared/sourcepublicapisupdate.go +++ /dev/null @@ -1,6 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -type SourcePublicApisUpdate struct { -} diff --git a/internal/sdk/pkg/models/shared/sourceretently.go b/internal/sdk/pkg/models/shared/sourceretently.go index c35c76c3a..1f262efd5 100755 --- a/internal/sdk/pkg/models/shared/sourceretently.go +++ b/internal/sdk/pkg/models/shared/sourceretently.go @@ -206,21 +206,21 @@ func CreateSourceRetentlyAuthenticationMechanismSourceRetentlyAuthenticationMech func (u *SourceRetentlyAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth := new(SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) + sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken := new(SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth); err == nil { - u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth = sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth - u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth + if err := d.Decode(&sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken); err == nil { + u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken = sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken + u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken return nil } - sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken := new(SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken) + sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth := new(SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken); err == nil { - u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken = sourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken - u.Type = SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken + if err := d.Decode(&sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth); err == nil { + u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth = sourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth + u.Type = 
SourceRetentlyAuthenticationMechanismTypeSourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth return nil } @@ -228,14 +228,14 @@ func (u *SourceRetentlyAuthenticationMechanism) UnmarshalJSON(data []byte) error } func (u SourceRetentlyAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil { - return json.Marshal(u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) - } - if u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken != nil { return json.Marshal(u.SourceRetentlyAuthenticationMechanismAuthenticateWithAPIToken) } + if u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil { + return json.Marshal(u.SourceRetentlyAuthenticationMechanismAuthenticateViaRetentlyOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceretentlyupdate.go b/internal/sdk/pkg/models/shared/sourceretentlyupdate.go index f863c1b3a..2b092a7fc 100755 --- a/internal/sdk/pkg/models/shared/sourceretentlyupdate.go +++ b/internal/sdk/pkg/models/shared/sourceretentlyupdate.go @@ -206,21 +206,21 @@ func CreateSourceRetentlyUpdateAuthenticationMechanismSourceRetentlyUpdateAuthen func (u *SourceRetentlyUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth := new(SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) + sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken := new(SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth); err == nil { - u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth = sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth - u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth + if err := d.Decode(&sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken); err == nil { + u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken = sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken + u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken return nil } - sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken := new(SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken) + sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth := new(SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken); err == nil { - u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken = sourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken - u.Type = SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken + if err := d.Decode(&sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth); err == nil { + u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth = sourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth + u.Type = 
SourceRetentlyUpdateAuthenticationMechanismTypeSourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth return nil } @@ -228,14 +228,14 @@ func (u *SourceRetentlyUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) } func (u SourceRetentlyUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil { - return json.Marshal(u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) - } - if u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken != nil { return json.Marshal(u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateWithAPIToken) } + if u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth != nil { + return json.Marshal(u.SourceRetentlyUpdateAuthenticationMechanismAuthenticateViaRetentlyOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sources3.go b/internal/sdk/pkg/models/shared/sources3.go index d0c46a259..7fb39df61 100755 --- a/internal/sdk/pkg/models/shared/sources3.go +++ b/internal/sdk/pkg/models/shared/sources3.go @@ -246,12 +246,12 @@ func CreateSourceS3FileFormatSourceS3FileFormatJsonl(sourceS3FileFormatJsonl Sou func (u *SourceS3FileFormat) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceS3FileFormatCSV := new(SourceS3FileFormatCSV) + sourceS3FileFormatAvro := new(SourceS3FileFormatAvro) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceS3FileFormatCSV); err == nil { - u.SourceS3FileFormatCSV = sourceS3FileFormatCSV - u.Type = SourceS3FileFormatTypeSourceS3FileFormatCSV + if err := d.Decode(&sourceS3FileFormatAvro); err == nil { + u.SourceS3FileFormatAvro = sourceS3FileFormatAvro + u.Type = SourceS3FileFormatTypeSourceS3FileFormatAvro return nil } @@ -264,21 +264,21 @@ func (u *SourceS3FileFormat) UnmarshalJSON(data []byte) error { return nil } - sourceS3FileFormatAvro := new(SourceS3FileFormatAvro) + sourceS3FileFormatJsonl := new(SourceS3FileFormatJsonl) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceS3FileFormatAvro); err == nil { - u.SourceS3FileFormatAvro = sourceS3FileFormatAvro - u.Type = SourceS3FileFormatTypeSourceS3FileFormatAvro + if err := d.Decode(&sourceS3FileFormatJsonl); err == nil { + u.SourceS3FileFormatJsonl = sourceS3FileFormatJsonl + u.Type = SourceS3FileFormatTypeSourceS3FileFormatJsonl return nil } - sourceS3FileFormatJsonl := new(SourceS3FileFormatJsonl) + sourceS3FileFormatCSV := new(SourceS3FileFormatCSV) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceS3FileFormatJsonl); err == nil { - u.SourceS3FileFormatJsonl = sourceS3FileFormatJsonl - u.Type = SourceS3FileFormatTypeSourceS3FileFormatJsonl + if err := d.Decode(&sourceS3FileFormatCSV); err == nil { + u.SourceS3FileFormatCSV = sourceS3FileFormatCSV + u.Type = SourceS3FileFormatTypeSourceS3FileFormatCSV return nil } @@ -286,33 +286,33 @@ func (u *SourceS3FileFormat) UnmarshalJSON(data []byte) error { } func (u SourceS3FileFormat) MarshalJSON() ([]byte, error) { - if u.SourceS3FileFormatCSV != nil { - return json.Marshal(u.SourceS3FileFormatCSV) + if u.SourceS3FileFormatAvro != nil { + return json.Marshal(u.SourceS3FileFormatAvro) } if u.SourceS3FileFormatParquet != nil { return json.Marshal(u.SourceS3FileFormatParquet) } - if u.SourceS3FileFormatAvro != nil { - return json.Marshal(u.SourceS3FileFormatAvro) - } - if u.SourceS3FileFormatJsonl != nil { 
return json.Marshal(u.SourceS3FileFormatJsonl) } + if u.SourceS3FileFormatCSV != nil { + return json.Marshal(u.SourceS3FileFormatCSV) + } + return nil, nil } -// SourceS3S3AmazonWebServices - Use this to load files from S3 or S3-compatible services +// SourceS3S3AmazonWebServices - Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services type SourceS3S3AmazonWebServices struct { // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"` // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"` // Name of the S3 bucket where the file(s) exist. - Bucket string `json:"bucket"` + Bucket *string `json:"bucket,omitempty"` // Endpoint to an S3 compatible service. Leave empty to use AWS. Endpoint *string `json:"endpoint,omitempty"` // By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate. @@ -345,16 +345,555 @@ func (e *SourceS3S3) UnmarshalJSON(data []byte) error { } } +type SourceS3FileBasedStreamConfigFormatParquetFormatFiletype string + +const ( + SourceS3FileBasedStreamConfigFormatParquetFormatFiletypeParquet SourceS3FileBasedStreamConfigFormatParquetFormatFiletype = "parquet" +) + +func (e SourceS3FileBasedStreamConfigFormatParquetFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatParquetFormatFiletype { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatParquetFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "parquet": + *e = SourceS3FileBasedStreamConfigFormatParquetFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatParquetFormatFiletype: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3FileBasedStreamConfigFormatParquetFormat struct { + // Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. 
+ DecimalAsFloat *bool `json:"decimal_as_float,omitempty"` + Filetype *SourceS3FileBasedStreamConfigFormatParquetFormatFiletype `json:"filetype,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype string + +const ( + SourceS3FileBasedStreamConfigFormatJsonlFormatFiletypeJsonl SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype = "jsonl" +) + +func (e SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "jsonl": + *e = SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3FileBasedStreamConfigFormatJsonlFormat struct { + Filetype *SourceS3FileBasedStreamConfigFormatJsonlFormatFiletype `json:"filetype,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatCSVFormatFiletype string + +const ( + SourceS3FileBasedStreamConfigFormatCSVFormatFiletypeCsv SourceS3FileBasedStreamConfigFormatCSVFormatFiletype = "csv" +) + +func (e SourceS3FileBasedStreamConfigFormatCSVFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatFiletype { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatCSVFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "csv": + *e = SourceS3FileBasedStreamConfigFormatCSVFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatFiletype: %v", v) + } +} + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType string + +const ( + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionTypeUserProvided SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType = "User Provided" +) + +func (e SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "User Provided": + *e = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. 
Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided struct { + // The column names that will be used while emitting the CSV records + ColumnNames []string `json:"column_names"` + HeaderDefinitionType *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType `json:"header_definition_type,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType string + +const ( + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionTypeAutogenerated SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType = "Autogenerated" +) + +func (e SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "Autogenerated": + *e = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated struct { + HeaderDefinitionType *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType `json:"header_definition_type,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType string + +const ( + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionTypeFromCsv SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType = "From CSV" +) + +func (e SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "From CSV": + *e = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV struct { + HeaderDefinitionType *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType `json:"header_definition_type,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType string + +const ( + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_From CSV" + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_Autogenerated" + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_User Provided" +) + +type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct { + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + + Type SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType +} + +func CreateSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV(sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition { + typ := SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + + return SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: &sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV, + Type: typ, + } +} + +func CreateSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated(sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition { + typ := SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + + return SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: &sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated, + Type: typ, + } +} + +func 
CreateSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided(sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition { + typ := SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + + return SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: &sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided, + Type: typ, + } +} + +func (u *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV := new(SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV); err == nil { + u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + u.Type = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + return nil + } + + sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated := new(SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated); err == nil { + u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + u.Type = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + return nil + } + + sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided := new(SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided); err == nil { + u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = sourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + u.Type = SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) MarshalJSON() ([]byte, error) { + if u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) + } + + if u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) + } + + if 
u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) + } + + return nil, nil +} + +// SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType - How to infer the types of the columns. If none, inference default to strings. +type SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType string + +const ( + SourceS3FileBasedStreamConfigFormatCSVFormatInferenceTypeNone SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType = "None" + SourceS3FileBasedStreamConfigFormatCSVFormatInferenceTypePrimitiveTypesOnly SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType = "Primitive Types Only" +) + +func (e SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType) ToPointer() *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "None": + fallthrough + case "Primitive Types Only": + *e = SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3FileBasedStreamConfigFormatCSVFormat struct { + // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. + Delimiter *string `json:"delimiter,omitempty"` + // Whether two quotes in a quoted CSV value denote a single quote in the data. + DoubleQuote *bool `json:"double_quote,omitempty"` + // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. + Encoding *string `json:"encoding,omitempty"` + // The character used for escaping special characters. To disallow escaping, leave this field blank. + EscapeChar *string `json:"escape_char,omitempty"` + // A set of case-sensitive strings that should be interpreted as false values. + FalseValues []string `json:"false_values,omitempty"` + Filetype *SourceS3FileBasedStreamConfigFormatCSVFormatFiletype `json:"filetype,omitempty"` + // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + HeaderDefinition *SourceS3FileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `json:"header_definition,omitempty"` + // How to infer the types of the columns. If none, inference default to strings. + InferenceType *SourceS3FileBasedStreamConfigFormatCSVFormatInferenceType `json:"inference_type,omitempty"` + // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. + NullValues []string `json:"null_values,omitempty"` + // The character used for quoting CSV values. To disallow quoting, make this field blank. 
+ QuoteChar *string `json:"quote_char,omitempty"` + // The number of rows to skip after the header row. + SkipRowsAfterHeader *int64 `json:"skip_rows_after_header,omitempty"` + // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. + SkipRowsBeforeHeader *int64 `json:"skip_rows_before_header,omitempty"` + // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. + StringsCanBeNull *bool `json:"strings_can_be_null,omitempty"` + // A set of case-sensitive strings that should be interpreted as true values. + TrueValues []string `json:"true_values,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatAvroFormatFiletype string + +const ( + SourceS3FileBasedStreamConfigFormatAvroFormatFiletypeAvro SourceS3FileBasedStreamConfigFormatAvroFormatFiletype = "avro" +) + +func (e SourceS3FileBasedStreamConfigFormatAvroFormatFiletype) ToPointer() *SourceS3FileBasedStreamConfigFormatAvroFormatFiletype { + return &e +} + +func (e *SourceS3FileBasedStreamConfigFormatAvroFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "avro": + *e = SourceS3FileBasedStreamConfigFormatAvroFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigFormatAvroFormatFiletype: %v", v) + } +} + +// SourceS3FileBasedStreamConfigFormatAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3FileBasedStreamConfigFormatAvroFormat struct { + // Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. 
+ DoubleAsString *bool `json:"double_as_string,omitempty"` + Filetype *SourceS3FileBasedStreamConfigFormatAvroFormatFiletype `json:"filetype,omitempty"` +} + +type SourceS3FileBasedStreamConfigFormatType string + +const ( + SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatAvroFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_Avro Format" + SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatCSVFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_CSV Format" + SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatJsonlFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_Jsonl Format" + SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatParquetFormat SourceS3FileBasedStreamConfigFormatType = "source-s3_FileBasedStreamConfig_Format_Parquet Format" +) + +type SourceS3FileBasedStreamConfigFormat struct { + SourceS3FileBasedStreamConfigFormatAvroFormat *SourceS3FileBasedStreamConfigFormatAvroFormat + SourceS3FileBasedStreamConfigFormatCSVFormat *SourceS3FileBasedStreamConfigFormatCSVFormat + SourceS3FileBasedStreamConfigFormatJsonlFormat *SourceS3FileBasedStreamConfigFormatJsonlFormat + SourceS3FileBasedStreamConfigFormatParquetFormat *SourceS3FileBasedStreamConfigFormatParquetFormat + + Type SourceS3FileBasedStreamConfigFormatType +} + +func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatAvroFormat(sourceS3FileBasedStreamConfigFormatAvroFormat SourceS3FileBasedStreamConfigFormatAvroFormat) SourceS3FileBasedStreamConfigFormat { + typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatAvroFormat + + return SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatAvroFormat: &sourceS3FileBasedStreamConfigFormatAvroFormat, + Type: typ, + } +} + +func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatCSVFormat(sourceS3FileBasedStreamConfigFormatCSVFormat SourceS3FileBasedStreamConfigFormatCSVFormat) SourceS3FileBasedStreamConfigFormat { + typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatCSVFormat + + return SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatCSVFormat: &sourceS3FileBasedStreamConfigFormatCSVFormat, + Type: typ, + } +} + +func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatJsonlFormat(sourceS3FileBasedStreamConfigFormatJsonlFormat SourceS3FileBasedStreamConfigFormatJsonlFormat) SourceS3FileBasedStreamConfigFormat { + typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatJsonlFormat + + return SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatJsonlFormat: &sourceS3FileBasedStreamConfigFormatJsonlFormat, + Type: typ, + } +} + +func CreateSourceS3FileBasedStreamConfigFormatSourceS3FileBasedStreamConfigFormatParquetFormat(sourceS3FileBasedStreamConfigFormatParquetFormat SourceS3FileBasedStreamConfigFormatParquetFormat) SourceS3FileBasedStreamConfigFormat { + typ := SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatParquetFormat + + return SourceS3FileBasedStreamConfigFormat{ + SourceS3FileBasedStreamConfigFormatParquetFormat: &sourceS3FileBasedStreamConfigFormatParquetFormat, + Type: typ, + } +} + +func (u *SourceS3FileBasedStreamConfigFormat) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + 
sourceS3FileBasedStreamConfigFormatJsonlFormat := new(SourceS3FileBasedStreamConfigFormatJsonlFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatJsonlFormat); err == nil { + u.SourceS3FileBasedStreamConfigFormatJsonlFormat = sourceS3FileBasedStreamConfigFormatJsonlFormat + u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatJsonlFormat + return nil + } + + sourceS3FileBasedStreamConfigFormatAvroFormat := new(SourceS3FileBasedStreamConfigFormatAvroFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatAvroFormat); err == nil { + u.SourceS3FileBasedStreamConfigFormatAvroFormat = sourceS3FileBasedStreamConfigFormatAvroFormat + u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatAvroFormat + return nil + } + + sourceS3FileBasedStreamConfigFormatParquetFormat := new(SourceS3FileBasedStreamConfigFormatParquetFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatParquetFormat); err == nil { + u.SourceS3FileBasedStreamConfigFormatParquetFormat = sourceS3FileBasedStreamConfigFormatParquetFormat + u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatParquetFormat + return nil + } + + sourceS3FileBasedStreamConfigFormatCSVFormat := new(SourceS3FileBasedStreamConfigFormatCSVFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3FileBasedStreamConfigFormatCSVFormat); err == nil { + u.SourceS3FileBasedStreamConfigFormatCSVFormat = sourceS3FileBasedStreamConfigFormatCSVFormat + u.Type = SourceS3FileBasedStreamConfigFormatTypeSourceS3FileBasedStreamConfigFormatCSVFormat + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u SourceS3FileBasedStreamConfigFormat) MarshalJSON() ([]byte, error) { + if u.SourceS3FileBasedStreamConfigFormatJsonlFormat != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatJsonlFormat) + } + + if u.SourceS3FileBasedStreamConfigFormatAvroFormat != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatAvroFormat) + } + + if u.SourceS3FileBasedStreamConfigFormatParquetFormat != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatParquetFormat) + } + + if u.SourceS3FileBasedStreamConfigFormatCSVFormat != nil { + return json.Marshal(u.SourceS3FileBasedStreamConfigFormatCSVFormat) + } + + return nil, nil +} + +// SourceS3FileBasedStreamConfigValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. 
+type SourceS3FileBasedStreamConfigValidationPolicy string + +const ( + SourceS3FileBasedStreamConfigValidationPolicyEmitRecord SourceS3FileBasedStreamConfigValidationPolicy = "Emit Record" + SourceS3FileBasedStreamConfigValidationPolicySkipRecord SourceS3FileBasedStreamConfigValidationPolicy = "Skip Record" + SourceS3FileBasedStreamConfigValidationPolicyWaitForDiscover SourceS3FileBasedStreamConfigValidationPolicy = "Wait for Discover" +) + +func (e SourceS3FileBasedStreamConfigValidationPolicy) ToPointer() *SourceS3FileBasedStreamConfigValidationPolicy { + return &e +} + +func (e *SourceS3FileBasedStreamConfigValidationPolicy) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "Emit Record": + fallthrough + case "Skip Record": + fallthrough + case "Wait for Discover": + *e = SourceS3FileBasedStreamConfigValidationPolicy(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3FileBasedStreamConfigValidationPolicy: %v", v) + } +} + +type SourceS3FileBasedStreamConfig struct { + // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. + DaysToSyncIfHistoryIsFull *int64 `json:"days_to_sync_if_history_is_full,omitempty"` + // The data file type that is being extracted for a stream. + FileType string `json:"file_type"` + // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. + Format *SourceS3FileBasedStreamConfigFormat `json:"format,omitempty"` + // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. + Globs []string `json:"globs,omitempty"` + // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. + InputSchema *string `json:"input_schema,omitempty"` + // The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. + LegacyPrefix *string `json:"legacy_prefix,omitempty"` + // The name of the stream. + Name string `json:"name"` + // The column or columns (for a composite key) that serves as the unique identifier of a record. + PrimaryKey *string `json:"primary_key,omitempty"` + // When enabled, syncs will not validate or structure records against the stream's schema. + Schemaless *bool `json:"schemaless,omitempty"` + // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. + ValidationPolicy *SourceS3FileBasedStreamConfigValidationPolicy `json:"validation_policy,omitempty"` +} + +// SourceS3 - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes +// because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. type SourceS3 struct { - // The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. - Dataset string `json:"dataset"` - // The format of the files you'd like to replicate + // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. 
+ AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"` + // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. + AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"` + // Name of the S3 bucket where the file(s) exist. + Bucket string `json:"bucket"` + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. + Dataset *string `json:"dataset,omitempty"` + // Endpoint to an S3 compatible service. Leave empty to use AWS. + Endpoint *string `json:"endpoint,omitempty"` + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate Format *SourceS3FileFormat `json:"format,omitempty"` - // A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. - PathPattern string `json:"path_pattern"` - // Use this to load files from S3 or S3-compatible services - Provider SourceS3S3AmazonWebServices `json:"provider"` - // Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. + PathPattern *string `json:"path_pattern,omitempty"` + // Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services + Provider *SourceS3S3AmazonWebServices `json:"provider,omitempty"` + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. Schema *string `json:"schema,omitempty"` SourceType SourceS3S3 `json:"sourceType"` + // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. + StartDate *time.Time `json:"start_date,omitempty"` + // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. 
+ Streams []SourceS3FileBasedStreamConfig `json:"streams"` } diff --git a/internal/sdk/pkg/models/shared/sources3createrequest.go b/internal/sdk/pkg/models/shared/sources3createrequest.go index 892c66d26..1bec6ba14 100755 --- a/internal/sdk/pkg/models/shared/sources3createrequest.go +++ b/internal/sdk/pkg/models/shared/sources3createrequest.go @@ -3,6 +3,8 @@ package shared type SourceS3CreateRequest struct { + // NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes + // because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. Configuration SourceS3 `json:"configuration"` Name string `json:"name"` // Optional secretID obtained through the public API OAuth redirect flow. diff --git a/internal/sdk/pkg/models/shared/sources3putrequest.go b/internal/sdk/pkg/models/shared/sources3putrequest.go index d2e8068a4..e622dbcfb 100755 --- a/internal/sdk/pkg/models/shared/sources3putrequest.go +++ b/internal/sdk/pkg/models/shared/sources3putrequest.go @@ -3,6 +3,8 @@ package shared type SourceS3PutRequest struct { + // NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes + // because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. Configuration SourceS3Update `json:"configuration"` Name string `json:"name"` WorkspaceID string `json:"workspaceId"` diff --git a/internal/sdk/pkg/models/shared/sources3update.go b/internal/sdk/pkg/models/shared/sources3update.go index c6c7fac8d..322258501 100755 --- a/internal/sdk/pkg/models/shared/sources3update.go +++ b/internal/sdk/pkg/models/shared/sources3update.go @@ -246,12 +246,12 @@ func CreateSourceS3UpdateFileFormatSourceS3UpdateFileFormatJsonl(sourceS3UpdateF func (u *SourceS3UpdateFileFormat) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceS3UpdateFileFormatCSV := new(SourceS3UpdateFileFormatCSV) + sourceS3UpdateFileFormatAvro := new(SourceS3UpdateFileFormatAvro) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceS3UpdateFileFormatCSV); err == nil { - u.SourceS3UpdateFileFormatCSV = sourceS3UpdateFileFormatCSV - u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatCSV + if err := d.Decode(&sourceS3UpdateFileFormatAvro); err == nil { + u.SourceS3UpdateFileFormatAvro = sourceS3UpdateFileFormatAvro + u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatAvro return nil } @@ -264,21 +264,21 @@ func (u *SourceS3UpdateFileFormat) UnmarshalJSON(data []byte) error { return nil } - sourceS3UpdateFileFormatAvro := new(SourceS3UpdateFileFormatAvro) + sourceS3UpdateFileFormatJsonl := new(SourceS3UpdateFileFormatJsonl) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceS3UpdateFileFormatAvro); err == nil { - u.SourceS3UpdateFileFormatAvro = sourceS3UpdateFileFormatAvro - u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatAvro + if err := d.Decode(&sourceS3UpdateFileFormatJsonl); err == nil { + u.SourceS3UpdateFileFormatJsonl = sourceS3UpdateFileFormatJsonl + u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatJsonl return nil } - sourceS3UpdateFileFormatJsonl := new(SourceS3UpdateFileFormatJsonl) + sourceS3UpdateFileFormatCSV := new(SourceS3UpdateFileFormatCSV) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceS3UpdateFileFormatJsonl); err == nil { - u.SourceS3UpdateFileFormatJsonl = 
sourceS3UpdateFileFormatJsonl - u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatJsonl + if err := d.Decode(&sourceS3UpdateFileFormatCSV); err == nil { + u.SourceS3UpdateFileFormatCSV = sourceS3UpdateFileFormatCSV + u.Type = SourceS3UpdateFileFormatTypeSourceS3UpdateFileFormatCSV return nil } @@ -286,33 +286,33 @@ func (u *SourceS3UpdateFileFormat) UnmarshalJSON(data []byte) error { } func (u SourceS3UpdateFileFormat) MarshalJSON() ([]byte, error) { - if u.SourceS3UpdateFileFormatCSV != nil { - return json.Marshal(u.SourceS3UpdateFileFormatCSV) + if u.SourceS3UpdateFileFormatAvro != nil { + return json.Marshal(u.SourceS3UpdateFileFormatAvro) } if u.SourceS3UpdateFileFormatParquet != nil { return json.Marshal(u.SourceS3UpdateFileFormatParquet) } - if u.SourceS3UpdateFileFormatAvro != nil { - return json.Marshal(u.SourceS3UpdateFileFormatAvro) - } - if u.SourceS3UpdateFileFormatJsonl != nil { return json.Marshal(u.SourceS3UpdateFileFormatJsonl) } + if u.SourceS3UpdateFileFormatCSV != nil { + return json.Marshal(u.SourceS3UpdateFileFormatCSV) + } + return nil, nil } -// SourceS3UpdateS3AmazonWebServices - Use this to load files from S3 or S3-compatible services +// SourceS3UpdateS3AmazonWebServices - Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services type SourceS3UpdateS3AmazonWebServices struct { // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"` // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"` // Name of the S3 bucket where the file(s) exist. - Bucket string `json:"bucket"` + Bucket *string `json:"bucket,omitempty"` // Endpoint to an S3 compatible service. Leave empty to use AWS. Endpoint *string `json:"endpoint,omitempty"` // By providing a path-like prefix (e.g. myFolder/thisTable/) under which all the relevant files sit, we can optimize finding these in S3. This is optional but recommended if your bucket contains many folders/files which you don't need to replicate. 
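The reordered UnmarshalJSON hunks above are instances of the try-each-variant pattern the generated unions use: the raw JSON is decoded into each candidate struct with DisallowUnknownFields, and the first successful decode wins, so the order of attempts decides which variant is chosen when payloads could match more than one. A minimal, self-contained sketch of that pattern, using toy types rather than the SDK's own:

package main

import (
    "bytes"
    "encoding/json"
    "errors"
    "fmt"
)

// Toy variants standing in for the generated per-format / per-credential structs.
type apiKeyAuth struct {
    APIKey string `json:"api_key"`
}

type oauthAuth struct {
    ClientID     string `json:"client_id"`
    ClientSecret string `json:"client_secret"`
}

// credentials mirrors the generated union wrappers: one pointer per variant.
type credentials struct {
    APIKey *apiKeyAuth
    OAuth  *oauthAuth
}

func (c *credentials) UnmarshalJSON(data []byte) error {
    // Try each variant in turn. DisallowUnknownFields rejects payloads that
    // carry keys the candidate struct does not declare, which keeps the first
    // attempt from greedily matching every input.
    var k apiKeyAuth
    d := json.NewDecoder(bytes.NewReader(data))
    d.DisallowUnknownFields()
    if err := d.Decode(&k); err == nil {
        c.APIKey = &k
        return nil
    }

    var o oauthAuth
    d = json.NewDecoder(bytes.NewReader(data))
    d.DisallowUnknownFields()
    if err := d.Decode(&o); err == nil {
        c.OAuth = &o
        return nil
    }
    return errors.New("could not unmarshal into supported union types")
}

func main() {
    var c credentials
    _ = json.Unmarshal([]byte(`{"client_id":"id","client_secret":"secret"}`), &c)
    fmt.Println(c.APIKey == nil, c.OAuth != nil) // prints "true true": the OAuth variant matched
}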
@@ -321,15 +321,554 @@ type SourceS3UpdateS3AmazonWebServices struct { StartDate *time.Time `json:"start_date,omitempty"` } +type SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletypeParquet SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype = "parquet" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "parquet": + *e = SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatParquetFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3UpdateFileBasedStreamConfigFormatParquetFormat struct { + // Whether to convert decimal fields to floats. There is a loss of precision when converting decimals to floats, so this is not recommended. + DecimalAsFloat *bool `json:"decimal_as_float,omitempty"` + Filetype *SourceS3UpdateFileBasedStreamConfigFormatParquetFormatFiletype `json:"filetype,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletypeJsonl SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype = "jsonl" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "jsonl": + *e = SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. 
+type SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat struct { + Filetype *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormatFiletype `json:"filetype,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletypeCsv SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype = "csv" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "csv": + *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype: %v", v) + } +} + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionTypeUserProvided SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType = "User Provided" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "User Provided": + *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided struct { + // The column names that will be used while emitting the CSV records + ColumnNames []string `json:"column_names"` + HeaderDefinitionType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvidedHeaderDefinitionType `json:"header_definition_type,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionTypeAutogenerated SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType = "Autogenerated" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "Autogenerated": + *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
+type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated struct { + HeaderDefinitionType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogeneratedHeaderDefinitionType `json:"header_definition_type,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionTypeFromCsv SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType = "From CSV" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "From CSV": + *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV - How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. 
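Taken together, the three header-definition variants in this hunk accept three JSON shapes, discriminated by header_definition_type. The following check is purely illustrative: the key names and discriminator literals come from the structs and enums above, while the payloads and column names are invented.

package main

import (
    "encoding/json"
    "fmt"
)

// Toy mirror of the generated header-definition variants: just the
// discriminator plus the one extra field used by "User Provided".
type headerDefinition struct {
    HeaderDefinitionType string   `json:"header_definition_type"`
    ColumnNames          []string `json:"column_names,omitempty"`
}

func main() {
    examples := []string{
        `{"header_definition_type": "From CSV"}`,
        `{"header_definition_type": "Autogenerated"}`,
        `{"header_definition_type": "User Provided", "column_names": ["id", "name", "amount"]}`,
    }
    for _, raw := range examples {
        var h headerDefinition
        if err := json.Unmarshal([]byte(raw), &h); err != nil {
            panic(err)
        }
        fmt.Println(h.HeaderDefinitionType, h.ColumnNames)
    }
}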
+type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV struct { + HeaderDefinitionType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSVHeaderDefinitionType `json:"header_definition_type,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_From CSV" + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_Autogenerated" + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format_CSV Header Definition_User Provided" +) + +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition struct { + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + + Type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionType +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV(sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition { + typ := SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + + return SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV, + Type: typ, + } +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated(sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition { + typ := SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + + return SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated: 
&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated, + Type: typ, + } +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided(sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition { + typ := SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + + return SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided, + Type: typ, + } +} + +func (u *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV = sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + u.Type = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV + return nil + } + + sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated = sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + u.Type = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated + return nil + } + + sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided = sourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + u.Type = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition) MarshalJSON() ([]byte, error) { + if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV != nil { + return 
json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionFromCSV) + } + + if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated != nil { + return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionAutogenerated) + } + + if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided != nil { + return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinitionUserProvided) + } + + return nil, nil +} + +// SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType - How to infer the types of the columns. If none, inference default to strings. +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceTypeNone SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType = "None" + SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceTypePrimitiveTypesOnly SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType = "Primitive Types Only" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "None": + fallthrough + case "Primitive Types Only": + *e = SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatCSVFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3UpdateFileBasedStreamConfigFormatCSVFormat struct { + // The character delimiting individual cells in the CSV data. This may only be a 1-character string. For tab-delimited data enter '\t'. + Delimiter *string `json:"delimiter,omitempty"` + // Whether two quotes in a quoted CSV value denote a single quote in the data. + DoubleQuote *bool `json:"double_quote,omitempty"` + // The character encoding of the CSV data. Leave blank to default to UTF8. See list of python encodings for allowable options. + Encoding *string `json:"encoding,omitempty"` + // The character used for escaping special characters. To disallow escaping, leave this field blank. + EscapeChar *string `json:"escape_char,omitempty"` + // A set of case-sensitive strings that should be interpreted as false values. + FalseValues []string `json:"false_values,omitempty"` + Filetype *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatFiletype `json:"filetype,omitempty"` + // How headers will be defined. `User Provided` assumes the CSV does not have a header row and uses the headers provided and `Autogenerated` assumes the CSV does not have a header row and the CDK will generate headers using for `f{i}` where `i` is the index starting from 0. Else, the default behavior is to use the header from the CSV file. If a user wants to autogenerate or provide column names for a CSV having headers, they can skip rows. + HeaderDefinition *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatCSVHeaderDefinition `json:"header_definition,omitempty"` + // How to infer the types of the columns. If none, inference default to strings. 
+ InferenceType *SourceS3UpdateFileBasedStreamConfigFormatCSVFormatInferenceType `json:"inference_type,omitempty"` + // A set of case-sensitive strings that should be interpreted as null values. For example, if the value 'NA' should be interpreted as null, enter 'NA' in this field. + NullValues []string `json:"null_values,omitempty"` + // The character used for quoting CSV values. To disallow quoting, make this field blank. + QuoteChar *string `json:"quote_char,omitempty"` + // The number of rows to skip after the header row. + SkipRowsAfterHeader *int64 `json:"skip_rows_after_header,omitempty"` + // The number of rows to skip before the header row. For example, if the header row is on the 3rd row, enter 2 in this field. + SkipRowsBeforeHeader *int64 `json:"skip_rows_before_header,omitempty"` + // Whether strings can be interpreted as null values. If true, strings that match the null_values set will be interpreted as null. If false, strings that match the null_values set will be interpreted as the string itself. + StringsCanBeNull *bool `json:"strings_can_be_null,omitempty"` + // A set of case-sensitive strings that should be interpreted as true values. + TrueValues []string `json:"true_values,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletypeAvro SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype = "avro" +) + +func (e SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype) ToPointer() *SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "avro": + *e = SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype: %v", v) + } +} + +// SourceS3UpdateFileBasedStreamConfigFormatAvroFormat - The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. +type SourceS3UpdateFileBasedStreamConfigFormatAvroFormat struct { + // Whether to convert double fields to strings. This is recommended if you have decimal numbers with a high degree of precision because there can be a loss precision when handling floating point numbers. 
+ DoubleAsString *bool `json:"double_as_string,omitempty"` + Filetype *SourceS3UpdateFileBasedStreamConfigFormatAvroFormatFiletype `json:"filetype,omitempty"` +} + +type SourceS3UpdateFileBasedStreamConfigFormatType string + +const ( + SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatAvroFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_Avro Format" + SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_CSV Format" + SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_Jsonl Format" + SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatParquetFormat SourceS3UpdateFileBasedStreamConfigFormatType = "source-s3-update_FileBasedStreamConfig_Format_Parquet Format" +) + +type SourceS3UpdateFileBasedStreamConfigFormat struct { + SourceS3UpdateFileBasedStreamConfigFormatAvroFormat *SourceS3UpdateFileBasedStreamConfigFormatAvroFormat + SourceS3UpdateFileBasedStreamConfigFormatCSVFormat *SourceS3UpdateFileBasedStreamConfigFormatCSVFormat + SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat *SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat + SourceS3UpdateFileBasedStreamConfigFormatParquetFormat *SourceS3UpdateFileBasedStreamConfigFormatParquetFormat + + Type SourceS3UpdateFileBasedStreamConfigFormatType +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatAvroFormat(sourceS3UpdateFileBasedStreamConfigFormatAvroFormat SourceS3UpdateFileBasedStreamConfigFormatAvroFormat) SourceS3UpdateFileBasedStreamConfigFormat { + typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatAvroFormat + + return SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatAvroFormat: &sourceS3UpdateFileBasedStreamConfigFormatAvroFormat, + Type: typ, + } +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatCSVFormat(sourceS3UpdateFileBasedStreamConfigFormatCSVFormat SourceS3UpdateFileBasedStreamConfigFormatCSVFormat) SourceS3UpdateFileBasedStreamConfigFormat { + typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormat + + return SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatCSVFormat: &sourceS3UpdateFileBasedStreamConfigFormatCSVFormat, + Type: typ, + } +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat(sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat) SourceS3UpdateFileBasedStreamConfigFormat { + typ := SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat + + return SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat: &sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat, + Type: typ, + } +} + +func CreateSourceS3UpdateFileBasedStreamConfigFormatSourceS3UpdateFileBasedStreamConfigFormatParquetFormat(sourceS3UpdateFileBasedStreamConfigFormatParquetFormat SourceS3UpdateFileBasedStreamConfigFormatParquetFormat) SourceS3UpdateFileBasedStreamConfigFormat { + typ := 
SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatParquetFormat + + return SourceS3UpdateFileBasedStreamConfigFormat{ + SourceS3UpdateFileBasedStreamConfigFormatParquetFormat: &sourceS3UpdateFileBasedStreamConfigFormatParquetFormat, + Type: typ, + } +} + +func (u *SourceS3UpdateFileBasedStreamConfigFormat) UnmarshalJSON(data []byte) error { + var d *json.Decoder + + sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat := new(SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat = sourceS3UpdateFileBasedStreamConfigFormatJsonlFormat + u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatJsonlFormat + return nil + } + + sourceS3UpdateFileBasedStreamConfigFormatAvroFormat := new(SourceS3UpdateFileBasedStreamConfigFormatAvroFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatAvroFormat); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat = sourceS3UpdateFileBasedStreamConfigFormatAvroFormat + u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatAvroFormat + return nil + } + + sourceS3UpdateFileBasedStreamConfigFormatParquetFormat := new(SourceS3UpdateFileBasedStreamConfigFormatParquetFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatParquetFormat); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat = sourceS3UpdateFileBasedStreamConfigFormatParquetFormat + u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatParquetFormat + return nil + } + + sourceS3UpdateFileBasedStreamConfigFormatCSVFormat := new(SourceS3UpdateFileBasedStreamConfigFormatCSVFormat) + d = json.NewDecoder(bytes.NewReader(data)) + d.DisallowUnknownFields() + if err := d.Decode(&sourceS3UpdateFileBasedStreamConfigFormatCSVFormat); err == nil { + u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat = sourceS3UpdateFileBasedStreamConfigFormatCSVFormat + u.Type = SourceS3UpdateFileBasedStreamConfigFormatTypeSourceS3UpdateFileBasedStreamConfigFormatCSVFormat + return nil + } + + return errors.New("could not unmarshal into supported union types") +} + +func (u SourceS3UpdateFileBasedStreamConfigFormat) MarshalJSON() ([]byte, error) { + if u.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat != nil { + return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatJsonlFormat) + } + + if u.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat != nil { + return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatAvroFormat) + } + + if u.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat != nil { + return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatParquetFormat) + } + + if u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat != nil { + return json.Marshal(u.SourceS3UpdateFileBasedStreamConfigFormatCSVFormat) + } + + return nil, nil +} + +// SourceS3UpdateFileBasedStreamConfigValidationPolicy - The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. 
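The validation-policy enum defined next follows the same guarded UnmarshalJSON convention as the other string enums in this patch: only the whitelisted literals are accepted, anything else returns an error. A stripped-down sketch of that behaviour with a toy enum type (the literals are the ones from the generated constants below):

package main

import (
    "encoding/json"
    "fmt"
)

// Toy stand-in for the generated string enums, e.g. the validation policy.
type validationPolicy string

func (e *validationPolicy) UnmarshalJSON(data []byte) error {
    var v string
    if err := json.Unmarshal(data, &v); err != nil {
        return err
    }
    switch v {
    case "Emit Record", "Skip Record", "Wait for Discover":
        *e = validationPolicy(v)
        return nil
    default:
        return fmt.Errorf("invalid value for validationPolicy: %v", v)
    }
}

func main() {
    var p validationPolicy
    fmt.Println(json.Unmarshal([]byte(`"Skip Record"`), &p), p) // <nil> Skip Record
    fmt.Println(json.Unmarshal([]byte(`"Drop Table"`), &p))     // invalid value error
}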
+type SourceS3UpdateFileBasedStreamConfigValidationPolicy string + +const ( + SourceS3UpdateFileBasedStreamConfigValidationPolicyEmitRecord SourceS3UpdateFileBasedStreamConfigValidationPolicy = "Emit Record" + SourceS3UpdateFileBasedStreamConfigValidationPolicySkipRecord SourceS3UpdateFileBasedStreamConfigValidationPolicy = "Skip Record" + SourceS3UpdateFileBasedStreamConfigValidationPolicyWaitForDiscover SourceS3UpdateFileBasedStreamConfigValidationPolicy = "Wait for Discover" +) + +func (e SourceS3UpdateFileBasedStreamConfigValidationPolicy) ToPointer() *SourceS3UpdateFileBasedStreamConfigValidationPolicy { + return &e +} + +func (e *SourceS3UpdateFileBasedStreamConfigValidationPolicy) UnmarshalJSON(data []byte) error { + var v string + if err := json.Unmarshal(data, &v); err != nil { + return err + } + switch v { + case "Emit Record": + fallthrough + case "Skip Record": + fallthrough + case "Wait for Discover": + *e = SourceS3UpdateFileBasedStreamConfigValidationPolicy(v) + return nil + default: + return fmt.Errorf("invalid value for SourceS3UpdateFileBasedStreamConfigValidationPolicy: %v", v) + } +} + +type SourceS3UpdateFileBasedStreamConfig struct { + // When the state history of the file store is full, syncs will only read files that were last modified in the provided day range. + DaysToSyncIfHistoryIsFull *int64 `json:"days_to_sync_if_history_is_full,omitempty"` + // The data file type that is being extracted for a stream. + FileType string `json:"file_type"` + // The configuration options that are used to alter how to read incoming files that deviate from the standard formatting. + Format *SourceS3UpdateFileBasedStreamConfigFormat `json:"format,omitempty"` + // The pattern used to specify which files should be selected from the file system. For more information on glob pattern matching look here. + Globs []string `json:"globs,omitempty"` + // The schema that will be used to validate records extracted from the file. This will override the stream schema that is auto-detected from incoming files. + InputSchema *string `json:"input_schema,omitempty"` + // The path prefix configured in v3 versions of the S3 connector. This option is deprecated in favor of a single glob. + LegacyPrefix *string `json:"legacy_prefix,omitempty"` + // The name of the stream. + Name string `json:"name"` + // The column or columns (for a composite key) that serves as the unique identifier of a record. + PrimaryKey *string `json:"primary_key,omitempty"` + // When enabled, syncs will not validate or structure records against the stream's schema. + Schemaless *bool `json:"schemaless,omitempty"` + // The name of the validation policy that dictates sync behavior when a record does not adhere to the stream schema. + ValidationPolicy *SourceS3UpdateFileBasedStreamConfigValidationPolicy `json:"validation_policy,omitempty"` +} + +// SourceS3Update - NOTE: When this Spec is changed, legacy_config_transformer.py must also be modified to uptake the changes +// because it is responsible for converting legacy S3 v3 configs into v4 configs using the File-Based CDK. type SourceS3Update struct { - // The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. - Dataset string `json:"dataset"` - // The format of the files you'd like to replicate + // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. 
+ AwsAccessKeyID *string `json:"aws_access_key_id,omitempty"` + // In order to access private Buckets stored on AWS S3, this connector requires credentials with the proper permissions. If accessing publicly available data, this field is not necessary. + AwsSecretAccessKey *string `json:"aws_secret_access_key,omitempty"` + // Name of the S3 bucket where the file(s) exist. + Bucket string `json:"bucket"` + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.name instead. The name of the stream you would like this source to output. Can contain letters, numbers, or underscores. + Dataset *string `json:"dataset,omitempty"` + // Endpoint to an S3 compatible service. Leave empty to use AWS. + Endpoint *string `json:"endpoint,omitempty"` + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.format instead. The format of the files you'd like to replicate Format *SourceS3UpdateFileFormat `json:"format,omitempty"` - // A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. - PathPattern string `json:"path_pattern"` - // Use this to load files from S3 or S3-compatible services - Provider SourceS3UpdateS3AmazonWebServices `json:"provider"` - // Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.globs instead. A regular expression which tells the connector which files to replicate. All files which match this pattern will be replicated. Use | to separate multiple patterns. See this page to understand pattern syntax (GLOBSTAR and SPLIT flags are enabled). Use pattern ** to pick up all files. + PathPattern *string `json:"path_pattern,omitempty"` + // Deprecated and will be removed soon. Please do not use this field anymore and use bucket, aws_access_key_id, aws_secret_access_key and endpoint instead. Use this to load files from S3 or S3-compatible services + Provider *SourceS3UpdateS3AmazonWebServices `json:"provider,omitempty"` + // Deprecated and will be removed soon. Please do not use this field anymore and use streams.input_schema instead. Optionally provide a schema to enforce, as a valid JSON string. Ensure this is a mapping of { "column" : "type" }, where types are valid JSON Schema datatypes. Leave as {} to auto-infer the schema. Schema *string `json:"schema,omitempty"` + // UTC date and time in the format 2017-01-25T00:00:00.000000Z. Any file modified before this date will not be replicated. + StartDate *time.Time `json:"start_date,omitempty"` + // Each instance of this configuration defines a stream. Use this to define which files belong in the stream, their format, and how they should be parsed and validated. When sending data to warehouse destination such as Snowflake or BigQuery, each stream is a separate table. 
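The streams field that closes this struct is what ties the new pieces together: bucket and credentials sit at the top level, and each table is described by one stream entry instead of the deprecated dataset/path_pattern/provider trio. The payload below is purely illustrative; only the key names and enum literals are taken from the generated structs, while the bucket name, stream name, glob and secrets are made up.

package main

import (
    "encoding/json"
    "fmt"
)

func main() {
    // Hypothetical v4 source-s3 configuration sketch.
    raw := `{
      "bucket": "example-bucket",
      "aws_access_key_id": "example-key-id",
      "aws_secret_access_key": "example-secret",
      "streams": [
        {
          "name": "orders",
          "file_type": "csv",
          "globs": ["orders/*.csv"],
          "validation_policy": "Emit Record",
          "format": {
            "filetype": "csv",
            "delimiter": ",",
            "header_definition": {"header_definition_type": "From CSV"}
          }
        }
      ]
    }`

    var cfg map[string]any
    if err := json.Unmarshal([]byte(raw), &cfg); err != nil {
        panic(err)
    }
    fmt.Println(len(cfg["streams"].([]any)), "stream(s) configured")
}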
+ Streams []SourceS3UpdateFileBasedStreamConfig `json:"streams"` } diff --git a/internal/sdk/pkg/models/shared/sourcesalesloft.go b/internal/sdk/pkg/models/shared/sourcesalesloft.go index 85801bad6..af5a5cf5c 100755 --- a/internal/sdk/pkg/models/shared/sourcesalesloft.go +++ b/internal/sdk/pkg/models/shared/sourcesalesloft.go @@ -113,21 +113,21 @@ func CreateSourceSalesloftCredentialsSourceSalesloftCredentialsAuthenticateViaAP func (u *SourceSalesloftCredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSalesloftCredentialsAuthenticateViaOAuth := new(SourceSalesloftCredentialsAuthenticateViaOAuth) + sourceSalesloftCredentialsAuthenticateViaAPIKey := new(SourceSalesloftCredentialsAuthenticateViaAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSalesloftCredentialsAuthenticateViaOAuth); err == nil { - u.SourceSalesloftCredentialsAuthenticateViaOAuth = sourceSalesloftCredentialsAuthenticateViaOAuth - u.Type = SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaOAuth + if err := d.Decode(&sourceSalesloftCredentialsAuthenticateViaAPIKey); err == nil { + u.SourceSalesloftCredentialsAuthenticateViaAPIKey = sourceSalesloftCredentialsAuthenticateViaAPIKey + u.Type = SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaAPIKey return nil } - sourceSalesloftCredentialsAuthenticateViaAPIKey := new(SourceSalesloftCredentialsAuthenticateViaAPIKey) + sourceSalesloftCredentialsAuthenticateViaOAuth := new(SourceSalesloftCredentialsAuthenticateViaOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSalesloftCredentialsAuthenticateViaAPIKey); err == nil { - u.SourceSalesloftCredentialsAuthenticateViaAPIKey = sourceSalesloftCredentialsAuthenticateViaAPIKey - u.Type = SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaAPIKey + if err := d.Decode(&sourceSalesloftCredentialsAuthenticateViaOAuth); err == nil { + u.SourceSalesloftCredentialsAuthenticateViaOAuth = sourceSalesloftCredentialsAuthenticateViaOAuth + u.Type = SourceSalesloftCredentialsTypeSourceSalesloftCredentialsAuthenticateViaOAuth return nil } @@ -135,14 +135,14 @@ func (u *SourceSalesloftCredentials) UnmarshalJSON(data []byte) error { } func (u SourceSalesloftCredentials) MarshalJSON() ([]byte, error) { - if u.SourceSalesloftCredentialsAuthenticateViaOAuth != nil { - return json.Marshal(u.SourceSalesloftCredentialsAuthenticateViaOAuth) - } - if u.SourceSalesloftCredentialsAuthenticateViaAPIKey != nil { return json.Marshal(u.SourceSalesloftCredentialsAuthenticateViaAPIKey) } + if u.SourceSalesloftCredentialsAuthenticateViaOAuth != nil { + return json.Marshal(u.SourceSalesloftCredentialsAuthenticateViaOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go b/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go index 77570db00..3eac04137 100755 --- a/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go +++ b/internal/sdk/pkg/models/shared/sourcesalesloftupdate.go @@ -113,21 +113,21 @@ func CreateSourceSalesloftUpdateCredentialsSourceSalesloftUpdateCredentialsAuthe func (u *SourceSalesloftUpdateCredentials) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSalesloftUpdateCredentialsAuthenticateViaOAuth := new(SourceSalesloftUpdateCredentialsAuthenticateViaOAuth) + sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey := new(SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey) d = 
json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSalesloftUpdateCredentialsAuthenticateViaOAuth); err == nil { - u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth = sourceSalesloftUpdateCredentialsAuthenticateViaOAuth - u.Type = SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaOAuth + if err := d.Decode(&sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey); err == nil { + u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey = sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey + u.Type = SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey return nil } - sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey := new(SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey) + sourceSalesloftUpdateCredentialsAuthenticateViaOAuth := new(SourceSalesloftUpdateCredentialsAuthenticateViaOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey); err == nil { - u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey = sourceSalesloftUpdateCredentialsAuthenticateViaAPIKey - u.Type = SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaAPIKey + if err := d.Decode(&sourceSalesloftUpdateCredentialsAuthenticateViaOAuth); err == nil { + u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth = sourceSalesloftUpdateCredentialsAuthenticateViaOAuth + u.Type = SourceSalesloftUpdateCredentialsTypeSourceSalesloftUpdateCredentialsAuthenticateViaOAuth return nil } @@ -135,14 +135,14 @@ func (u *SourceSalesloftUpdateCredentials) UnmarshalJSON(data []byte) error { } func (u SourceSalesloftUpdateCredentials) MarshalJSON() ([]byte, error) { - if u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth != nil { - return json.Marshal(u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth) - } - if u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey != nil { return json.Marshal(u.SourceSalesloftUpdateCredentialsAuthenticateViaAPIKey) } + if u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth != nil { + return json.Marshal(u.SourceSalesloftUpdateCredentialsAuthenticateViaOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceshopify.go b/internal/sdk/pkg/models/shared/sourceshopify.go index 69943058c..4655cca84 100755 --- a/internal/sdk/pkg/models/shared/sourceshopify.go +++ b/internal/sdk/pkg/models/shared/sourceshopify.go @@ -10,100 +10,100 @@ import ( "fmt" ) -type SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod string +type SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod string const ( - SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethodOauth20 SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod = "oauth2.0" + SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethodAPIPassword SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod = "api_password" ) -func (e SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod { +func (e SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod) ToPointer() *SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod { return &e } -func (e *SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error { +func (e *SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != 
nil { return err } switch v { - case "oauth2.0": - *e = SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod(v) + case "api_password": + *e = SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod: %v", v) + return fmt.Errorf("invalid value for SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod: %v", v) } } -// SourceShopifyShopifyAuthorizationMethodOAuth20 - OAuth2.0 -type SourceShopifyShopifyAuthorizationMethodOAuth20 struct { - // The Access Token for making authenticated requests. - AccessToken *string `json:"access_token,omitempty"` - AuthMethod SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod `json:"auth_method"` - // The Client ID of the Shopify developer application. - ClientID *string `json:"client_id,omitempty"` - // The Client Secret of the Shopify developer application. - ClientSecret *string `json:"client_secret,omitempty"` +// SourceShopifyShopifyAuthorizationMethodAPIPassword - API Password Auth +type SourceShopifyShopifyAuthorizationMethodAPIPassword struct { + // The API Password for your private application in the `Shopify` store. + APIPassword string `json:"api_password"` + AuthMethod SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod `json:"auth_method"` } -type SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod string +type SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod string const ( - SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethodAPIPassword SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod = "api_password" + SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethodOauth20 SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod = "oauth2.0" ) -func (e SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod) ToPointer() *SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod { +func (e SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod { return &e } -func (e *SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod) UnmarshalJSON(data []byte) error { +func (e *SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "api_password": - *e = SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod(v) + case "oauth2.0": + *e = SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod: %v", v) + return fmt.Errorf("invalid value for SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod: %v", v) } } -// SourceShopifyShopifyAuthorizationMethodAPIPassword - API Password Auth -type SourceShopifyShopifyAuthorizationMethodAPIPassword struct { - // The API Password for your private application in the `Shopify` store. - APIPassword string `json:"api_password"` - AuthMethod SourceShopifyShopifyAuthorizationMethodAPIPasswordAuthMethod `json:"auth_method"` +// SourceShopifyShopifyAuthorizationMethodOAuth20 - OAuth2.0 +type SourceShopifyShopifyAuthorizationMethodOAuth20 struct { + // The Access Token for making authenticated requests. + AccessToken *string `json:"access_token,omitempty"` + AuthMethod SourceShopifyShopifyAuthorizationMethodOAuth20AuthMethod `json:"auth_method"` + // The Client ID of the Shopify developer application. 
+ ClientID *string `json:"client_id,omitempty"` + // The Client Secret of the Shopify developer application. + ClientSecret *string `json:"client_secret,omitempty"` } type SourceShopifyShopifyAuthorizationMethodType string const ( - SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodType = "source-shopify_Shopify Authorization Method_API Password" SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodOAuth20 SourceShopifyShopifyAuthorizationMethodType = "source-shopify_Shopify Authorization Method_OAuth2.0" + SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodType = "source-shopify_Shopify Authorization Method_API Password" ) type SourceShopifyShopifyAuthorizationMethod struct { - SourceShopifyShopifyAuthorizationMethodAPIPassword *SourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodOAuth20 *SourceShopifyShopifyAuthorizationMethodOAuth20 + SourceShopifyShopifyAuthorizationMethodAPIPassword *SourceShopifyShopifyAuthorizationMethodAPIPassword Type SourceShopifyShopifyAuthorizationMethodType } -func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyShopifyAuthorizationMethodAPIPassword(sourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodAPIPassword) SourceShopifyShopifyAuthorizationMethod { - typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword +func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyShopifyAuthorizationMethodOAuth20(sourceShopifyShopifyAuthorizationMethodOAuth20 SourceShopifyShopifyAuthorizationMethodOAuth20) SourceShopifyShopifyAuthorizationMethod { + typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodOAuth20 return SourceShopifyShopifyAuthorizationMethod{ - SourceShopifyShopifyAuthorizationMethodAPIPassword: &sourceShopifyShopifyAuthorizationMethodAPIPassword, + SourceShopifyShopifyAuthorizationMethodOAuth20: &sourceShopifyShopifyAuthorizationMethodOAuth20, Type: typ, } } -func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyShopifyAuthorizationMethodOAuth20(sourceShopifyShopifyAuthorizationMethodOAuth20 SourceShopifyShopifyAuthorizationMethodOAuth20) SourceShopifyShopifyAuthorizationMethod { - typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodOAuth20 +func CreateSourceShopifyShopifyAuthorizationMethodSourceShopifyShopifyAuthorizationMethodAPIPassword(sourceShopifyShopifyAuthorizationMethodAPIPassword SourceShopifyShopifyAuthorizationMethodAPIPassword) SourceShopifyShopifyAuthorizationMethod { + typ := SourceShopifyShopifyAuthorizationMethodTypeSourceShopifyShopifyAuthorizationMethodAPIPassword return SourceShopifyShopifyAuthorizationMethod{ - SourceShopifyShopifyAuthorizationMethodOAuth20: &sourceShopifyShopifyAuthorizationMethodOAuth20, + SourceShopifyShopifyAuthorizationMethodAPIPassword: &sourceShopifyShopifyAuthorizationMethodAPIPassword, Type: typ, } } @@ -175,5 +175,5 @@ type SourceShopify struct { Shop string `json:"shop"` SourceType SourceShopifyShopify `json:"sourceType"` // The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. 
- StartDate types.Date `json:"start_date"` + StartDate *types.Date `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourceshopifyupdate.go b/internal/sdk/pkg/models/shared/sourceshopifyupdate.go index c2d433bd2..29285ffe9 100755 --- a/internal/sdk/pkg/models/shared/sourceshopifyupdate.go +++ b/internal/sdk/pkg/models/shared/sourceshopifyupdate.go @@ -10,100 +10,100 @@ import ( "fmt" ) -type SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod string +type SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod string const ( - SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethodOauth20 SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod = "oauth2.0" + SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod = "api_password" ) -func (e SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod { +func (e SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod) ToPointer() *SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod { return &e } -func (e *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error { +func (e *SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "oauth2.0": - *e = SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod(v) + case "api_password": + *e = SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod: %v", v) + return fmt.Errorf("invalid value for SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod: %v", v) } } -// SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 - OAuth2.0 -type SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 struct { - // The Access Token for making authenticated requests. - AccessToken *string `json:"access_token,omitempty"` - AuthMethod SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod `json:"auth_method"` - // The Client ID of the Shopify developer application. - ClientID *string `json:"client_id,omitempty"` - // The Client Secret of the Shopify developer application. - ClientSecret *string `json:"client_secret,omitempty"` +// SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword - API Password Auth +type SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword struct { + // The API Password for your private application in the `Shopify` store. 
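The start_date change at the top of this hunk (mirrored for the update type below) turns the field into a pointer with omitempty, which is the usual Go way to mark a JSON field optional: a nil pointer is simply left out of the payload instead of being emitted as a zero value. A minimal sketch, using a plain string in place of the SDK's internal types.Date:

package main

import (
    "encoding/json"
    "fmt"
)

// Toy config: start_date is optional, so it is a pointer with omitempty.
type shopifyConfig struct {
    Shop      string  `json:"shop"`
    StartDate *string `json:"start_date,omitempty"`
}

func main() {
    withoutDate, _ := json.Marshal(shopifyConfig{Shop: "example-shop"})
    fmt.Println(string(withoutDate)) // {"shop":"example-shop"}

    d := "2023-01-01"
    withDate, _ := json.Marshal(shopifyConfig{Shop: "example-shop", StartDate: &d})
    fmt.Println(string(withDate)) // {"shop":"example-shop","start_date":"2023-01-01"}
}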
+ APIPassword string `json:"api_password"` + AuthMethod SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod `json:"auth_method"` } -type SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod string +type SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod string const ( - SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod = "api_password" + SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethodOauth20 SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod = "oauth2.0" ) -func (e SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod) ToPointer() *SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod { +func (e SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod) ToPointer() *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod { return &e } -func (e *SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod) UnmarshalJSON(data []byte) error { +func (e *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod) UnmarshalJSON(data []byte) error { var v string if err := json.Unmarshal(data, &v); err != nil { return err } switch v { - case "api_password": - *e = SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod(v) + case "oauth2.0": + *e = SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod(v) return nil default: - return fmt.Errorf("invalid value for SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod: %v", v) + return fmt.Errorf("invalid value for SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod: %v", v) } } -// SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword - API Password Auth -type SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword struct { - // The API Password for your private application in the `Shopify` store. - APIPassword string `json:"api_password"` - AuthMethod SourceShopifyUpdateShopifyAuthorizationMethodAPIPasswordAuthMethod `json:"auth_method"` +// SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 - OAuth2.0 +type SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 struct { + // The Access Token for making authenticated requests. + AccessToken *string `json:"access_token,omitempty"` + AuthMethod SourceShopifyUpdateShopifyAuthorizationMethodOAuth20AuthMethod `json:"auth_method"` + // The Client ID of the Shopify developer application. + ClientID *string `json:"client_id,omitempty"` + // The Client Secret of the Shopify developer application. 
+ ClientSecret *string `json:"client_secret,omitempty"` } type SourceShopifyUpdateShopifyAuthorizationMethodType string const ( - SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodType = "source-shopify-update_Shopify Authorization Method_API Password" SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodOAuth20 SourceShopifyUpdateShopifyAuthorizationMethodType = "source-shopify-update_Shopify Authorization Method_OAuth2.0" + SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodType = "source-shopify-update_Shopify Authorization Method_API Password" ) type SourceShopifyUpdateShopifyAuthorizationMethod struct { - SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 *SourceShopifyUpdateShopifyAuthorizationMethodOAuth20 + SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword *SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword Type SourceShopifyUpdateShopifyAuthorizationMethodType } -func CreateSourceShopifyUpdateShopifyAuthorizationMethodSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword(sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword) SourceShopifyUpdateShopifyAuthorizationMethod { - typ := SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword +func CreateSourceShopifyUpdateShopifyAuthorizationMethodSourceShopifyUpdateShopifyAuthorizationMethodOAuth20(sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 SourceShopifyUpdateShopifyAuthorizationMethodOAuth20) SourceShopifyUpdateShopifyAuthorizationMethod { + typ := SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodOAuth20 return SourceShopifyUpdateShopifyAuthorizationMethod{ - SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword: &sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword, + SourceShopifyUpdateShopifyAuthorizationMethodOAuth20: &sourceShopifyUpdateShopifyAuthorizationMethodOAuth20, Type: typ, } } -func CreateSourceShopifyUpdateShopifyAuthorizationMethodSourceShopifyUpdateShopifyAuthorizationMethodOAuth20(sourceShopifyUpdateShopifyAuthorizationMethodOAuth20 SourceShopifyUpdateShopifyAuthorizationMethodOAuth20) SourceShopifyUpdateShopifyAuthorizationMethod { - typ := SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodOAuth20 +func CreateSourceShopifyUpdateShopifyAuthorizationMethodSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword(sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword) SourceShopifyUpdateShopifyAuthorizationMethod { + typ := SourceShopifyUpdateShopifyAuthorizationMethodTypeSourceShopifyUpdateShopifyAuthorizationMethodAPIPassword return SourceShopifyUpdateShopifyAuthorizationMethod{ - SourceShopifyUpdateShopifyAuthorizationMethodOAuth20: &sourceShopifyUpdateShopifyAuthorizationMethodOAuth20, + SourceShopifyUpdateShopifyAuthorizationMethodAPIPassword: &sourceShopifyUpdateShopifyAuthorizationMethodAPIPassword, Type: typ, } } @@ -150,5 +150,5 @@ type SourceShopifyUpdate struct { // The name of your Shopify store found in the URL. 
For example, if your URL was https://NAME.myshopify.com, then the name would be 'NAME' or 'NAME.myshopify.com'. Shop string `json:"shop"` // The date you would like to replicate data from. Format: YYYY-MM-DD. Any data before this date will not be replicated. - StartDate types.Date `json:"start_date"` + StartDate *types.Date `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourceslack.go b/internal/sdk/pkg/models/shared/sourceslack.go index ca194c67a..f852d03f8 100755 --- a/internal/sdk/pkg/models/shared/sourceslack.go +++ b/internal/sdk/pkg/models/shared/sourceslack.go @@ -111,21 +111,21 @@ func CreateSourceSlackAuthenticationMechanismSourceSlackAuthenticationMechanismA func (u *SourceSlackAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSlackAuthenticationMechanismSignInViaSlackOAuth := new(SourceSlackAuthenticationMechanismSignInViaSlackOAuth) + sourceSlackAuthenticationMechanismAPIToken := new(SourceSlackAuthenticationMechanismAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSlackAuthenticationMechanismSignInViaSlackOAuth); err == nil { - u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth = sourceSlackAuthenticationMechanismSignInViaSlackOAuth - u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismSignInViaSlackOAuth + if err := d.Decode(&sourceSlackAuthenticationMechanismAPIToken); err == nil { + u.SourceSlackAuthenticationMechanismAPIToken = sourceSlackAuthenticationMechanismAPIToken + u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismAPIToken return nil } - sourceSlackAuthenticationMechanismAPIToken := new(SourceSlackAuthenticationMechanismAPIToken) + sourceSlackAuthenticationMechanismSignInViaSlackOAuth := new(SourceSlackAuthenticationMechanismSignInViaSlackOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSlackAuthenticationMechanismAPIToken); err == nil { - u.SourceSlackAuthenticationMechanismAPIToken = sourceSlackAuthenticationMechanismAPIToken - u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismAPIToken + if err := d.Decode(&sourceSlackAuthenticationMechanismSignInViaSlackOAuth); err == nil { + u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth = sourceSlackAuthenticationMechanismSignInViaSlackOAuth + u.Type = SourceSlackAuthenticationMechanismTypeSourceSlackAuthenticationMechanismSignInViaSlackOAuth return nil } @@ -133,14 +133,14 @@ func (u *SourceSlackAuthenticationMechanism) UnmarshalJSON(data []byte) error { } func (u SourceSlackAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth != nil { - return json.Marshal(u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth) - } - if u.SourceSlackAuthenticationMechanismAPIToken != nil { return json.Marshal(u.SourceSlackAuthenticationMechanismAPIToken) } + if u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth != nil { + return json.Marshal(u.SourceSlackAuthenticationMechanismSignInViaSlackOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourceslackupdate.go b/internal/sdk/pkg/models/shared/sourceslackupdate.go index 07a66c630..4809b8586 100755 --- a/internal/sdk/pkg/models/shared/sourceslackupdate.go +++ b/internal/sdk/pkg/models/shared/sourceslackupdate.go @@ -111,21 +111,21 @@ func CreateSourceSlackUpdateAuthenticationMechanismSourceSlackUpdateAuthenticati func (u 
*SourceSlackUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth := new(SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth) + sourceSlackUpdateAuthenticationMechanismAPIToken := new(SourceSlackUpdateAuthenticationMechanismAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth); err == nil { - u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth = sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth - u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth + if err := d.Decode(&sourceSlackUpdateAuthenticationMechanismAPIToken); err == nil { + u.SourceSlackUpdateAuthenticationMechanismAPIToken = sourceSlackUpdateAuthenticationMechanismAPIToken + u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismAPIToken return nil } - sourceSlackUpdateAuthenticationMechanismAPIToken := new(SourceSlackUpdateAuthenticationMechanismAPIToken) + sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth := new(SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSlackUpdateAuthenticationMechanismAPIToken); err == nil { - u.SourceSlackUpdateAuthenticationMechanismAPIToken = sourceSlackUpdateAuthenticationMechanismAPIToken - u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismAPIToken + if err := d.Decode(&sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth); err == nil { + u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth = sourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth + u.Type = SourceSlackUpdateAuthenticationMechanismTypeSourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth return nil } @@ -133,14 +133,14 @@ func (u *SourceSlackUpdateAuthenticationMechanism) UnmarshalJSON(data []byte) er } func (u SourceSlackUpdateAuthenticationMechanism) MarshalJSON() ([]byte, error) { - if u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth != nil { - return json.Marshal(u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth) - } - if u.SourceSlackUpdateAuthenticationMechanismAPIToken != nil { return json.Marshal(u.SourceSlackUpdateAuthenticationMechanismAPIToken) } + if u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth != nil { + return json.Marshal(u.SourceSlackUpdateAuthenticationMechanismSignInViaSlackOAuth) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesmartsheets.go b/internal/sdk/pkg/models/shared/sourcesmartsheets.go index 03ef07cc8..59cf3943b 100755 --- a/internal/sdk/pkg/models/shared/sourcesmartsheets.go +++ b/internal/sdk/pkg/models/shared/sourcesmartsheets.go @@ -113,21 +113,21 @@ func CreateSourceSmartsheetsAuthorizationMethodSourceSmartsheetsAuthorizationMet func (u *SourceSmartsheetsAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSmartsheetsAuthorizationMethodOAuth20 := new(SourceSmartsheetsAuthorizationMethodOAuth20) + sourceSmartsheetsAuthorizationMethodAPIAccessToken := new(SourceSmartsheetsAuthorizationMethodAPIAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSmartsheetsAuthorizationMethodOAuth20); err == nil { - 
u.SourceSmartsheetsAuthorizationMethodOAuth20 = sourceSmartsheetsAuthorizationMethodOAuth20 - u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodOAuth20 + if err := d.Decode(&sourceSmartsheetsAuthorizationMethodAPIAccessToken); err == nil { + u.SourceSmartsheetsAuthorizationMethodAPIAccessToken = sourceSmartsheetsAuthorizationMethodAPIAccessToken + u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodAPIAccessToken return nil } - sourceSmartsheetsAuthorizationMethodAPIAccessToken := new(SourceSmartsheetsAuthorizationMethodAPIAccessToken) + sourceSmartsheetsAuthorizationMethodOAuth20 := new(SourceSmartsheetsAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSmartsheetsAuthorizationMethodAPIAccessToken); err == nil { - u.SourceSmartsheetsAuthorizationMethodAPIAccessToken = sourceSmartsheetsAuthorizationMethodAPIAccessToken - u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodAPIAccessToken + if err := d.Decode(&sourceSmartsheetsAuthorizationMethodOAuth20); err == nil { + u.SourceSmartsheetsAuthorizationMethodOAuth20 = sourceSmartsheetsAuthorizationMethodOAuth20 + u.Type = SourceSmartsheetsAuthorizationMethodTypeSourceSmartsheetsAuthorizationMethodOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceSmartsheetsAuthorizationMethod) UnmarshalJSON(data []byte) error } func (u SourceSmartsheetsAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceSmartsheetsAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceSmartsheetsAuthorizationMethodOAuth20) - } - if u.SourceSmartsheetsAuthorizationMethodAPIAccessToken != nil { return json.Marshal(u.SourceSmartsheetsAuthorizationMethodAPIAccessToken) } + if u.SourceSmartsheetsAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceSmartsheetsAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go b/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go index 038173c04..bc026d94f 100755 --- a/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go +++ b/internal/sdk/pkg/models/shared/sourcesmartsheetsupdate.go @@ -113,21 +113,21 @@ func CreateSourceSmartsheetsUpdateAuthorizationMethodSourceSmartsheetsUpdateAuth func (u *SourceSmartsheetsUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSmartsheetsUpdateAuthorizationMethodOAuth20 := new(SourceSmartsheetsUpdateAuthorizationMethodOAuth20) + sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken := new(SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSmartsheetsUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 = sourceSmartsheetsUpdateAuthorizationMethodOAuth20 - u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken); err == nil { + u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken = sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken + u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken return nil } - sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken := new(SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken) + 
sourceSmartsheetsUpdateAuthorizationMethodOAuth20 := new(SourceSmartsheetsUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken); err == nil { - u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken = sourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken - u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken + if err := d.Decode(&sourceSmartsheetsUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 = sourceSmartsheetsUpdateAuthorizationMethodOAuth20 + u.Type = SourceSmartsheetsUpdateAuthorizationMethodTypeSourceSmartsheetsUpdateAuthorizationMethodOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceSmartsheetsUpdateAuthorizationMethod) UnmarshalJSON(data []byte) } func (u SourceSmartsheetsUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20) - } - if u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken != nil { return json.Marshal(u.SourceSmartsheetsUpdateAuthorizationMethodAPIAccessToken) } + if u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceSmartsheetsUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesnowflake.go b/internal/sdk/pkg/models/shared/sourcesnowflake.go index 59705451d..aa6aac316 100755 --- a/internal/sdk/pkg/models/shared/sourcesnowflake.go +++ b/internal/sdk/pkg/models/shared/sourcesnowflake.go @@ -112,21 +112,21 @@ func CreateSourceSnowflakeAuthorizationMethodSourceSnowflakeAuthorizationMethodU func (u *SourceSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSnowflakeAuthorizationMethodOAuth20 := new(SourceSnowflakeAuthorizationMethodOAuth20) + sourceSnowflakeAuthorizationMethodUsernameAndPassword := new(SourceSnowflakeAuthorizationMethodUsernameAndPassword) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSnowflakeAuthorizationMethodOAuth20); err == nil { - u.SourceSnowflakeAuthorizationMethodOAuth20 = sourceSnowflakeAuthorizationMethodOAuth20 - u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodOAuth20 + if err := d.Decode(&sourceSnowflakeAuthorizationMethodUsernameAndPassword); err == nil { + u.SourceSnowflakeAuthorizationMethodUsernameAndPassword = sourceSnowflakeAuthorizationMethodUsernameAndPassword + u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodUsernameAndPassword return nil } - sourceSnowflakeAuthorizationMethodUsernameAndPassword := new(SourceSnowflakeAuthorizationMethodUsernameAndPassword) + sourceSnowflakeAuthorizationMethodOAuth20 := new(SourceSnowflakeAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSnowflakeAuthorizationMethodUsernameAndPassword); err == nil { - u.SourceSnowflakeAuthorizationMethodUsernameAndPassword = sourceSnowflakeAuthorizationMethodUsernameAndPassword - u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodUsernameAndPassword + if err := d.Decode(&sourceSnowflakeAuthorizationMethodOAuth20); err == nil { + u.SourceSnowflakeAuthorizationMethodOAuth20 = sourceSnowflakeAuthorizationMethodOAuth20 
+ u.Type = SourceSnowflakeAuthorizationMethodTypeSourceSnowflakeAuthorizationMethodOAuth20 return nil } @@ -134,14 +134,14 @@ func (u *SourceSnowflakeAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceSnowflakeAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceSnowflakeAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceSnowflakeAuthorizationMethodOAuth20) - } - if u.SourceSnowflakeAuthorizationMethodUsernameAndPassword != nil { return json.Marshal(u.SourceSnowflakeAuthorizationMethodUsernameAndPassword) } + if u.SourceSnowflakeAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceSnowflakeAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go b/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go index 52ac3a4dc..41673aec4 100755 --- a/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go +++ b/internal/sdk/pkg/models/shared/sourcesnowflakeupdate.go @@ -112,21 +112,21 @@ func CreateSourceSnowflakeUpdateAuthorizationMethodSourceSnowflakeUpdateAuthoriz func (u *SourceSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSnowflakeUpdateAuthorizationMethodOAuth20 := new(SourceSnowflakeUpdateAuthorizationMethodOAuth20) + sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword := new(SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSnowflakeUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceSnowflakeUpdateAuthorizationMethodOAuth20 = sourceSnowflakeUpdateAuthorizationMethodOAuth20 - u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword); err == nil { + u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword = sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword + u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword return nil } - sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword := new(SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword) + sourceSnowflakeUpdateAuthorizationMethodOAuth20 := new(SourceSnowflakeUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword); err == nil { - u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword = sourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword - u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword + if err := d.Decode(&sourceSnowflakeUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceSnowflakeUpdateAuthorizationMethodOAuth20 = sourceSnowflakeUpdateAuthorizationMethodOAuth20 + u.Type = SourceSnowflakeUpdateAuthorizationMethodTypeSourceSnowflakeUpdateAuthorizationMethodOAuth20 return nil } @@ -134,14 +134,14 @@ func (u *SourceSnowflakeUpdateAuthorizationMethod) UnmarshalJSON(data []byte) er } func (u SourceSnowflakeUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceSnowflakeUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceSnowflakeUpdateAuthorizationMethodOAuth20) - } - if u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword != nil { return 
json.Marshal(u.SourceSnowflakeUpdateAuthorizationMethodUsernameAndPassword) } + if u.SourceSnowflakeUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceSnowflakeUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesquare.go b/internal/sdk/pkg/models/shared/sourcesquare.go index 4d8d0eeed..8cd8d96ad 100755 --- a/internal/sdk/pkg/models/shared/sourcesquare.go +++ b/internal/sdk/pkg/models/shared/sourcesquare.go @@ -111,21 +111,21 @@ func CreateSourceSquareAuthenticationSourceSquareAuthenticationAPIKey(sourceSqua func (u *SourceSquareAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSquareAuthenticationOauthAuthentication := new(SourceSquareAuthenticationOauthAuthentication) + sourceSquareAuthenticationAPIKey := new(SourceSquareAuthenticationAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSquareAuthenticationOauthAuthentication); err == nil { - u.SourceSquareAuthenticationOauthAuthentication = sourceSquareAuthenticationOauthAuthentication - u.Type = SourceSquareAuthenticationTypeSourceSquareAuthenticationOauthAuthentication + if err := d.Decode(&sourceSquareAuthenticationAPIKey); err == nil { + u.SourceSquareAuthenticationAPIKey = sourceSquareAuthenticationAPIKey + u.Type = SourceSquareAuthenticationTypeSourceSquareAuthenticationAPIKey return nil } - sourceSquareAuthenticationAPIKey := new(SourceSquareAuthenticationAPIKey) + sourceSquareAuthenticationOauthAuthentication := new(SourceSquareAuthenticationOauthAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSquareAuthenticationAPIKey); err == nil { - u.SourceSquareAuthenticationAPIKey = sourceSquareAuthenticationAPIKey - u.Type = SourceSquareAuthenticationTypeSourceSquareAuthenticationAPIKey + if err := d.Decode(&sourceSquareAuthenticationOauthAuthentication); err == nil { + u.SourceSquareAuthenticationOauthAuthentication = sourceSquareAuthenticationOauthAuthentication + u.Type = SourceSquareAuthenticationTypeSourceSquareAuthenticationOauthAuthentication return nil } @@ -133,14 +133,14 @@ func (u *SourceSquareAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceSquareAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceSquareAuthenticationOauthAuthentication != nil { - return json.Marshal(u.SourceSquareAuthenticationOauthAuthentication) - } - if u.SourceSquareAuthenticationAPIKey != nil { return json.Marshal(u.SourceSquareAuthenticationAPIKey) } + if u.SourceSquareAuthenticationOauthAuthentication != nil { + return json.Marshal(u.SourceSquareAuthenticationOauthAuthentication) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesquareupdate.go b/internal/sdk/pkg/models/shared/sourcesquareupdate.go index e0110494d..192eaf7e6 100755 --- a/internal/sdk/pkg/models/shared/sourcesquareupdate.go +++ b/internal/sdk/pkg/models/shared/sourcesquareupdate.go @@ -111,21 +111,21 @@ func CreateSourceSquareUpdateAuthenticationSourceSquareUpdateAuthenticationAPIKe func (u *SourceSquareUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceSquareUpdateAuthenticationOauthAuthentication := new(SourceSquareUpdateAuthenticationOauthAuthentication) + sourceSquareUpdateAuthenticationAPIKey := new(SourceSquareUpdateAuthenticationAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := 
d.Decode(&sourceSquareUpdateAuthenticationOauthAuthentication); err == nil { - u.SourceSquareUpdateAuthenticationOauthAuthentication = sourceSquareUpdateAuthenticationOauthAuthentication - u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationOauthAuthentication + if err := d.Decode(&sourceSquareUpdateAuthenticationAPIKey); err == nil { + u.SourceSquareUpdateAuthenticationAPIKey = sourceSquareUpdateAuthenticationAPIKey + u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationAPIKey return nil } - sourceSquareUpdateAuthenticationAPIKey := new(SourceSquareUpdateAuthenticationAPIKey) + sourceSquareUpdateAuthenticationOauthAuthentication := new(SourceSquareUpdateAuthenticationOauthAuthentication) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceSquareUpdateAuthenticationAPIKey); err == nil { - u.SourceSquareUpdateAuthenticationAPIKey = sourceSquareUpdateAuthenticationAPIKey - u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationAPIKey + if err := d.Decode(&sourceSquareUpdateAuthenticationOauthAuthentication); err == nil { + u.SourceSquareUpdateAuthenticationOauthAuthentication = sourceSquareUpdateAuthenticationOauthAuthentication + u.Type = SourceSquareUpdateAuthenticationTypeSourceSquareUpdateAuthenticationOauthAuthentication return nil } @@ -133,14 +133,14 @@ func (u *SourceSquareUpdateAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceSquareUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceSquareUpdateAuthenticationOauthAuthentication != nil { - return json.Marshal(u.SourceSquareUpdateAuthenticationOauthAuthentication) - } - if u.SourceSquareUpdateAuthenticationAPIKey != nil { return json.Marshal(u.SourceSquareUpdateAuthenticationAPIKey) } + if u.SourceSquareUpdateAuthenticationOauthAuthentication != nil { + return json.Marshal(u.SourceSquareUpdateAuthenticationOauthAuthentication) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcesresponse.go b/internal/sdk/pkg/models/shared/sourcesresponse.go index 98b302a15..897114977 100755 --- a/internal/sdk/pkg/models/shared/sourcesresponse.go +++ b/internal/sdk/pkg/models/shared/sourcesresponse.go @@ -2,7 +2,6 @@ package shared -// SourcesResponse - Successful operation type SourcesResponse struct { Data []SourceResponse `json:"data"` Next *string `json:"next,omitempty"` diff --git a/internal/sdk/pkg/models/shared/sourcestripe.go b/internal/sdk/pkg/models/shared/sourcestripe.go index 8bf9453f6..ec167d261 100755 --- a/internal/sdk/pkg/models/shared/sourcestripe.go +++ b/internal/sdk/pkg/models/shared/sourcestripe.go @@ -37,11 +37,11 @@ type SourceStripe struct { AccountID string `json:"account_id"` // Stripe API key (usually starts with 'sk_live_'; find yours here). ClientSecret string `json:"client_secret"` - // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here + // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. 
More info here LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"` // The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted. SliceRange *int64 `json:"slice_range,omitempty"` SourceType SourceStripeStripe `json:"sourceType"` // UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. - StartDate time.Time `json:"start_date"` + StartDate *time.Time `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcestripeupdate.go b/internal/sdk/pkg/models/shared/sourcestripeupdate.go index 8f447746a..b4103b482 100755 --- a/internal/sdk/pkg/models/shared/sourcestripeupdate.go +++ b/internal/sdk/pkg/models/shared/sourcestripeupdate.go @@ -11,10 +11,10 @@ type SourceStripeUpdate struct { AccountID string `json:"account_id"` // Stripe API key (usually starts with 'sk_live_'; find yours here). ClientSecret string `json:"client_secret"` - // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here + // When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. Applies only to streams that do not support event-based incremental syncs: CheckoutSessionLineItems, Events, SetupAttempts, ShippingRates, BalanceTransactions, Files, FileLinks. More info here LookbackWindowDays *int64 `json:"lookback_window_days,omitempty"` // The time increment used by the connector when requesting data from the Stripe API. The bigger the value is, the less requests will be made and faster the sync will be. On the other hand, the more seldom the state is persisted. SliceRange *int64 `json:"slice_range,omitempty"` // UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated. 
- StartDate time.Time `json:"start_date"` + StartDate *time.Time `json:"start_date,omitempty"` } diff --git a/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go b/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go index 1d1a31854..87cf9ff32 100755 --- a/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go +++ b/internal/sdk/pkg/models/shared/sourcetiktokmarketing.go @@ -115,21 +115,21 @@ func CreateSourceTiktokMarketingAuthenticationMethodSourceTiktokMarketingAuthent func (u *SourceTiktokMarketingAuthenticationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceTiktokMarketingAuthenticationMethodOAuth20 := new(SourceTiktokMarketingAuthenticationMethodOAuth20) + sourceTiktokMarketingAuthenticationMethodSandboxAccessToken := new(SourceTiktokMarketingAuthenticationMethodSandboxAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTiktokMarketingAuthenticationMethodOAuth20); err == nil { - u.SourceTiktokMarketingAuthenticationMethodOAuth20 = sourceTiktokMarketingAuthenticationMethodOAuth20 - u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodOAuth20 + if err := d.Decode(&sourceTiktokMarketingAuthenticationMethodSandboxAccessToken); err == nil { + u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken = sourceTiktokMarketingAuthenticationMethodSandboxAccessToken + u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodSandboxAccessToken return nil } - sourceTiktokMarketingAuthenticationMethodSandboxAccessToken := new(SourceTiktokMarketingAuthenticationMethodSandboxAccessToken) + sourceTiktokMarketingAuthenticationMethodOAuth20 := new(SourceTiktokMarketingAuthenticationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTiktokMarketingAuthenticationMethodSandboxAccessToken); err == nil { - u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken = sourceTiktokMarketingAuthenticationMethodSandboxAccessToken - u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodSandboxAccessToken + if err := d.Decode(&sourceTiktokMarketingAuthenticationMethodOAuth20); err == nil { + u.SourceTiktokMarketingAuthenticationMethodOAuth20 = sourceTiktokMarketingAuthenticationMethodOAuth20 + u.Type = SourceTiktokMarketingAuthenticationMethodTypeSourceTiktokMarketingAuthenticationMethodOAuth20 return nil } @@ -137,14 +137,14 @@ func (u *SourceTiktokMarketingAuthenticationMethod) UnmarshalJSON(data []byte) e } func (u SourceTiktokMarketingAuthenticationMethod) MarshalJSON() ([]byte, error) { - if u.SourceTiktokMarketingAuthenticationMethodOAuth20 != nil { - return json.Marshal(u.SourceTiktokMarketingAuthenticationMethodOAuth20) - } - if u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken != nil { return json.Marshal(u.SourceTiktokMarketingAuthenticationMethodSandboxAccessToken) } + if u.SourceTiktokMarketingAuthenticationMethodOAuth20 != nil { + return json.Marshal(u.SourceTiktokMarketingAuthenticationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go b/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go index 59887dcac..721c98586 100755 --- a/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go +++ b/internal/sdk/pkg/models/shared/sourcetiktokmarketingupdate.go @@ -115,21 +115,21 @@ func 
CreateSourceTiktokMarketingUpdateAuthenticationMethodSourceTiktokMarketingU func (u *SourceTiktokMarketingUpdateAuthenticationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 := new(SourceTiktokMarketingUpdateAuthenticationMethodOAuth20) + sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken := new(SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTiktokMarketingUpdateAuthenticationMethodOAuth20); err == nil { - u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 = sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 - u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodOAuth20 + if err := d.Decode(&sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken); err == nil { + u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken = sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken + u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken return nil } - sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken := new(SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken) + sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 := new(SourceTiktokMarketingUpdateAuthenticationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken); err == nil { - u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken = sourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken - u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken + if err := d.Decode(&sourceTiktokMarketingUpdateAuthenticationMethodOAuth20); err == nil { + u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 = sourceTiktokMarketingUpdateAuthenticationMethodOAuth20 + u.Type = SourceTiktokMarketingUpdateAuthenticationMethodTypeSourceTiktokMarketingUpdateAuthenticationMethodOAuth20 return nil } @@ -137,14 +137,14 @@ func (u *SourceTiktokMarketingUpdateAuthenticationMethod) UnmarshalJSON(data []b } func (u SourceTiktokMarketingUpdateAuthenticationMethod) MarshalJSON() ([]byte, error) { - if u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 != nil { - return json.Marshal(u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20) - } - if u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken != nil { return json.Marshal(u.SourceTiktokMarketingUpdateAuthenticationMethodSandboxAccessToken) } + if u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20 != nil { + return json.Marshal(u.SourceTiktokMarketingUpdateAuthenticationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcetrustpilot.go b/internal/sdk/pkg/models/shared/sourcetrustpilot.go index 51a1eef0f..dbe483587 100755 --- a/internal/sdk/pkg/models/shared/sourcetrustpilot.go +++ b/internal/sdk/pkg/models/shared/sourcetrustpilot.go @@ -114,21 +114,21 @@ func CreateSourceTrustpilotAuthorizationMethodSourceTrustpilotAuthorizationMetho func (u *SourceTrustpilotAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceTrustpilotAuthorizationMethodOAuth20 := new(SourceTrustpilotAuthorizationMethodOAuth20) + 
sourceTrustpilotAuthorizationMethodAPIKey := new(SourceTrustpilotAuthorizationMethodAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTrustpilotAuthorizationMethodOAuth20); err == nil { - u.SourceTrustpilotAuthorizationMethodOAuth20 = sourceTrustpilotAuthorizationMethodOAuth20 - u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodOAuth20 + if err := d.Decode(&sourceTrustpilotAuthorizationMethodAPIKey); err == nil { + u.SourceTrustpilotAuthorizationMethodAPIKey = sourceTrustpilotAuthorizationMethodAPIKey + u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodAPIKey return nil } - sourceTrustpilotAuthorizationMethodAPIKey := new(SourceTrustpilotAuthorizationMethodAPIKey) + sourceTrustpilotAuthorizationMethodOAuth20 := new(SourceTrustpilotAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTrustpilotAuthorizationMethodAPIKey); err == nil { - u.SourceTrustpilotAuthorizationMethodAPIKey = sourceTrustpilotAuthorizationMethodAPIKey - u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodAPIKey + if err := d.Decode(&sourceTrustpilotAuthorizationMethodOAuth20); err == nil { + u.SourceTrustpilotAuthorizationMethodOAuth20 = sourceTrustpilotAuthorizationMethodOAuth20 + u.Type = SourceTrustpilotAuthorizationMethodTypeSourceTrustpilotAuthorizationMethodOAuth20 return nil } @@ -136,14 +136,14 @@ func (u *SourceTrustpilotAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceTrustpilotAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceTrustpilotAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceTrustpilotAuthorizationMethodOAuth20) - } - if u.SourceTrustpilotAuthorizationMethodAPIKey != nil { return json.Marshal(u.SourceTrustpilotAuthorizationMethodAPIKey) } + if u.SourceTrustpilotAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceTrustpilotAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go b/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go index cbc3d4f5a..15bf17aad 100755 --- a/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go +++ b/internal/sdk/pkg/models/shared/sourcetrustpilotupdate.go @@ -114,21 +114,21 @@ func CreateSourceTrustpilotUpdateAuthorizationMethodSourceTrustpilotUpdateAuthor func (u *SourceTrustpilotUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceTrustpilotUpdateAuthorizationMethodOAuth20 := new(SourceTrustpilotUpdateAuthorizationMethodOAuth20) + sourceTrustpilotUpdateAuthorizationMethodAPIKey := new(SourceTrustpilotUpdateAuthorizationMethodAPIKey) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTrustpilotUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceTrustpilotUpdateAuthorizationMethodOAuth20 = sourceTrustpilotUpdateAuthorizationMethodOAuth20 - u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceTrustpilotUpdateAuthorizationMethodAPIKey); err == nil { + u.SourceTrustpilotUpdateAuthorizationMethodAPIKey = sourceTrustpilotUpdateAuthorizationMethodAPIKey + u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodAPIKey return nil } - sourceTrustpilotUpdateAuthorizationMethodAPIKey := 
new(SourceTrustpilotUpdateAuthorizationMethodAPIKey) + sourceTrustpilotUpdateAuthorizationMethodOAuth20 := new(SourceTrustpilotUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTrustpilotUpdateAuthorizationMethodAPIKey); err == nil { - u.SourceTrustpilotUpdateAuthorizationMethodAPIKey = sourceTrustpilotUpdateAuthorizationMethodAPIKey - u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodAPIKey + if err := d.Decode(&sourceTrustpilotUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceTrustpilotUpdateAuthorizationMethodOAuth20 = sourceTrustpilotUpdateAuthorizationMethodOAuth20 + u.Type = SourceTrustpilotUpdateAuthorizationMethodTypeSourceTrustpilotUpdateAuthorizationMethodOAuth20 return nil } @@ -136,14 +136,14 @@ func (u *SourceTrustpilotUpdateAuthorizationMethod) UnmarshalJSON(data []byte) e } func (u SourceTrustpilotUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceTrustpilotUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceTrustpilotUpdateAuthorizationMethodOAuth20) - } - if u.SourceTrustpilotUpdateAuthorizationMethodAPIKey != nil { return json.Marshal(u.SourceTrustpilotUpdateAuthorizationMethodAPIKey) } + if u.SourceTrustpilotUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceTrustpilotUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcetypeform.go b/internal/sdk/pkg/models/shared/sourcetypeform.go index 6272d6e08..ee27b7786 100755 --- a/internal/sdk/pkg/models/shared/sourcetypeform.go +++ b/internal/sdk/pkg/models/shared/sourcetypeform.go @@ -113,21 +113,21 @@ func CreateSourceTypeformAuthorizationMethodSourceTypeformAuthorizationMethodPri func (u *SourceTypeformAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceTypeformAuthorizationMethodOAuth20 := new(SourceTypeformAuthorizationMethodOAuth20) + sourceTypeformAuthorizationMethodPrivateToken := new(SourceTypeformAuthorizationMethodPrivateToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTypeformAuthorizationMethodOAuth20); err == nil { - u.SourceTypeformAuthorizationMethodOAuth20 = sourceTypeformAuthorizationMethodOAuth20 - u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodOAuth20 + if err := d.Decode(&sourceTypeformAuthorizationMethodPrivateToken); err == nil { + u.SourceTypeformAuthorizationMethodPrivateToken = sourceTypeformAuthorizationMethodPrivateToken + u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodPrivateToken return nil } - sourceTypeformAuthorizationMethodPrivateToken := new(SourceTypeformAuthorizationMethodPrivateToken) + sourceTypeformAuthorizationMethodOAuth20 := new(SourceTypeformAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTypeformAuthorizationMethodPrivateToken); err == nil { - u.SourceTypeformAuthorizationMethodPrivateToken = sourceTypeformAuthorizationMethodPrivateToken - u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodPrivateToken + if err := d.Decode(&sourceTypeformAuthorizationMethodOAuth20); err == nil { + u.SourceTypeformAuthorizationMethodOAuth20 = sourceTypeformAuthorizationMethodOAuth20 + u.Type = SourceTypeformAuthorizationMethodTypeSourceTypeformAuthorizationMethodOAuth20 return nil } @@ -135,14 +135,14 @@ 
func (u *SourceTypeformAuthorizationMethod) UnmarshalJSON(data []byte) error { } func (u SourceTypeformAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceTypeformAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceTypeformAuthorizationMethodOAuth20) - } - if u.SourceTypeformAuthorizationMethodPrivateToken != nil { return json.Marshal(u.SourceTypeformAuthorizationMethodPrivateToken) } + if u.SourceTypeformAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceTypeformAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcetypeformupdate.go b/internal/sdk/pkg/models/shared/sourcetypeformupdate.go index 9ae0b63d4..b1eac3762 100755 --- a/internal/sdk/pkg/models/shared/sourcetypeformupdate.go +++ b/internal/sdk/pkg/models/shared/sourcetypeformupdate.go @@ -113,21 +113,21 @@ func CreateSourceTypeformUpdateAuthorizationMethodSourceTypeformUpdateAuthorizat func (u *SourceTypeformUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceTypeformUpdateAuthorizationMethodOAuth20 := new(SourceTypeformUpdateAuthorizationMethodOAuth20) + sourceTypeformUpdateAuthorizationMethodPrivateToken := new(SourceTypeformUpdateAuthorizationMethodPrivateToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTypeformUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceTypeformUpdateAuthorizationMethodOAuth20 = sourceTypeformUpdateAuthorizationMethodOAuth20 - u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceTypeformUpdateAuthorizationMethodPrivateToken); err == nil { + u.SourceTypeformUpdateAuthorizationMethodPrivateToken = sourceTypeformUpdateAuthorizationMethodPrivateToken + u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodPrivateToken return nil } - sourceTypeformUpdateAuthorizationMethodPrivateToken := new(SourceTypeformUpdateAuthorizationMethodPrivateToken) + sourceTypeformUpdateAuthorizationMethodOAuth20 := new(SourceTypeformUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceTypeformUpdateAuthorizationMethodPrivateToken); err == nil { - u.SourceTypeformUpdateAuthorizationMethodPrivateToken = sourceTypeformUpdateAuthorizationMethodPrivateToken - u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodPrivateToken + if err := d.Decode(&sourceTypeformUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceTypeformUpdateAuthorizationMethodOAuth20 = sourceTypeformUpdateAuthorizationMethodOAuth20 + u.Type = SourceTypeformUpdateAuthorizationMethodTypeSourceTypeformUpdateAuthorizationMethodOAuth20 return nil } @@ -135,14 +135,14 @@ func (u *SourceTypeformUpdateAuthorizationMethod) UnmarshalJSON(data []byte) err } func (u SourceTypeformUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceTypeformUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceTypeformUpdateAuthorizationMethodOAuth20) - } - if u.SourceTypeformUpdateAuthorizationMethodPrivateToken != nil { return json.Marshal(u.SourceTypeformUpdateAuthorizationMethodPrivateToken) } + if u.SourceTypeformUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceTypeformUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcezendeskchat.go 
b/internal/sdk/pkg/models/shared/sourcezendeskchat.go index 0b3e63871..dd193b085 100755 --- a/internal/sdk/pkg/models/shared/sourcezendeskchat.go +++ b/internal/sdk/pkg/models/shared/sourcezendeskchat.go @@ -111,21 +111,21 @@ func CreateSourceZendeskChatAuthorizationMethodSourceZendeskChatAuthorizationMet func (u *SourceZendeskChatAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceZendeskChatAuthorizationMethodOAuth20 := new(SourceZendeskChatAuthorizationMethodOAuth20) + sourceZendeskChatAuthorizationMethodAccessToken := new(SourceZendeskChatAuthorizationMethodAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskChatAuthorizationMethodOAuth20); err == nil { - u.SourceZendeskChatAuthorizationMethodOAuth20 = sourceZendeskChatAuthorizationMethodOAuth20 - u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodOAuth20 + if err := d.Decode(&sourceZendeskChatAuthorizationMethodAccessToken); err == nil { + u.SourceZendeskChatAuthorizationMethodAccessToken = sourceZendeskChatAuthorizationMethodAccessToken + u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodAccessToken return nil } - sourceZendeskChatAuthorizationMethodAccessToken := new(SourceZendeskChatAuthorizationMethodAccessToken) + sourceZendeskChatAuthorizationMethodOAuth20 := new(SourceZendeskChatAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskChatAuthorizationMethodAccessToken); err == nil { - u.SourceZendeskChatAuthorizationMethodAccessToken = sourceZendeskChatAuthorizationMethodAccessToken - u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodAccessToken + if err := d.Decode(&sourceZendeskChatAuthorizationMethodOAuth20); err == nil { + u.SourceZendeskChatAuthorizationMethodOAuth20 = sourceZendeskChatAuthorizationMethodOAuth20 + u.Type = SourceZendeskChatAuthorizationMethodTypeSourceZendeskChatAuthorizationMethodOAuth20 return nil } @@ -133,14 +133,14 @@ func (u *SourceZendeskChatAuthorizationMethod) UnmarshalJSON(data []byte) error } func (u SourceZendeskChatAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceZendeskChatAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceZendeskChatAuthorizationMethodOAuth20) - } - if u.SourceZendeskChatAuthorizationMethodAccessToken != nil { return json.Marshal(u.SourceZendeskChatAuthorizationMethodAccessToken) } + if u.SourceZendeskChatAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceZendeskChatAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go b/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go index 27aaa7c28..0aadd5f86 100755 --- a/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go +++ b/internal/sdk/pkg/models/shared/sourcezendeskchatupdate.go @@ -111,21 +111,21 @@ func CreateSourceZendeskChatUpdateAuthorizationMethodSourceZendeskChatUpdateAuth func (u *SourceZendeskChatUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceZendeskChatUpdateAuthorizationMethodOAuth20 := new(SourceZendeskChatUpdateAuthorizationMethodOAuth20) + sourceZendeskChatUpdateAuthorizationMethodAccessToken := new(SourceZendeskChatUpdateAuthorizationMethodAccessToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := 
d.Decode(&sourceZendeskChatUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceZendeskChatUpdateAuthorizationMethodOAuth20 = sourceZendeskChatUpdateAuthorizationMethodOAuth20 - u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceZendeskChatUpdateAuthorizationMethodAccessToken); err == nil { + u.SourceZendeskChatUpdateAuthorizationMethodAccessToken = sourceZendeskChatUpdateAuthorizationMethodAccessToken + u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodAccessToken return nil } - sourceZendeskChatUpdateAuthorizationMethodAccessToken := new(SourceZendeskChatUpdateAuthorizationMethodAccessToken) + sourceZendeskChatUpdateAuthorizationMethodOAuth20 := new(SourceZendeskChatUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskChatUpdateAuthorizationMethodAccessToken); err == nil { - u.SourceZendeskChatUpdateAuthorizationMethodAccessToken = sourceZendeskChatUpdateAuthorizationMethodAccessToken - u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodAccessToken + if err := d.Decode(&sourceZendeskChatUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceZendeskChatUpdateAuthorizationMethodOAuth20 = sourceZendeskChatUpdateAuthorizationMethodOAuth20 + u.Type = SourceZendeskChatUpdateAuthorizationMethodTypeSourceZendeskChatUpdateAuthorizationMethodOAuth20 return nil } @@ -133,14 +133,14 @@ func (u *SourceZendeskChatUpdateAuthorizationMethod) UnmarshalJSON(data []byte) } func (u SourceZendeskChatUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceZendeskChatUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceZendeskChatUpdateAuthorizationMethodOAuth20) - } - if u.SourceZendeskChatUpdateAuthorizationMethodAccessToken != nil { return json.Marshal(u.SourceZendeskChatUpdateAuthorizationMethodAccessToken) } + if u.SourceZendeskChatUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceZendeskChatUpdateAuthorizationMethodOAuth20) + } + return nil, nil } diff --git a/internal/sdk/pkg/models/shared/sourcezendesksunshine.go b/internal/sdk/pkg/models/shared/sourcezendesksunshine.go index c70396b06..5a06d7458 100755 --- a/internal/sdk/pkg/models/shared/sourcezendesksunshine.go +++ b/internal/sdk/pkg/models/shared/sourcezendesksunshine.go @@ -7,6 +7,7 @@ import ( "encoding/json" "errors" "fmt" + "time" ) type SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod string @@ -39,54 +40,6 @@ type SourceZendeskSunshineAuthorizationMethodAPIToken struct { AuthMethod SourceZendeskSunshineAuthorizationMethodAPITokenAuthMethod `json:"auth_method"` // The user email for your Zendesk account Email string `json:"email"` - - AdditionalProperties interface{} `json:"-"` -} -type _SourceZendeskSunshineAuthorizationMethodAPIToken SourceZendeskSunshineAuthorizationMethodAPIToken - -func (c *SourceZendeskSunshineAuthorizationMethodAPIToken) UnmarshalJSON(bs []byte) error { - data := _SourceZendeskSunshineAuthorizationMethodAPIToken{} - - if err := json.Unmarshal(bs, &data); err != nil { - return err - } - *c = SourceZendeskSunshineAuthorizationMethodAPIToken(data) - - additionalFields := make(map[string]interface{}) - - if err := json.Unmarshal(bs, &additionalFields); err != nil { - return err - } - delete(additionalFields, "api_token") - delete(additionalFields, "auth_method") - delete(additionalFields, 
"email") - - c.AdditionalProperties = additionalFields - - return nil -} - -func (c SourceZendeskSunshineAuthorizationMethodAPIToken) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{} - bs, err := json.Marshal(_SourceZendeskSunshineAuthorizationMethodAPIToken(c)) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - bs, err = json.Marshal(c.AdditionalProperties) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - return json.Marshal(out) } type SourceZendeskSunshineAuthorizationMethodOAuth20AuthMethod string @@ -121,55 +74,6 @@ type SourceZendeskSunshineAuthorizationMethodOAuth20 struct { ClientID string `json:"client_id"` // The Client Secret of your OAuth application. ClientSecret string `json:"client_secret"` - - AdditionalProperties interface{} `json:"-"` -} -type _SourceZendeskSunshineAuthorizationMethodOAuth20 SourceZendeskSunshineAuthorizationMethodOAuth20 - -func (c *SourceZendeskSunshineAuthorizationMethodOAuth20) UnmarshalJSON(bs []byte) error { - data := _SourceZendeskSunshineAuthorizationMethodOAuth20{} - - if err := json.Unmarshal(bs, &data); err != nil { - return err - } - *c = SourceZendeskSunshineAuthorizationMethodOAuth20(data) - - additionalFields := make(map[string]interface{}) - - if err := json.Unmarshal(bs, &additionalFields); err != nil { - return err - } - delete(additionalFields, "access_token") - delete(additionalFields, "auth_method") - delete(additionalFields, "client_id") - delete(additionalFields, "client_secret") - - c.AdditionalProperties = additionalFields - - return nil -} - -func (c SourceZendeskSunshineAuthorizationMethodOAuth20) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{} - bs, err := json.Marshal(_SourceZendeskSunshineAuthorizationMethodOAuth20(c)) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - bs, err = json.Marshal(c.AdditionalProperties) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - return json.Marshal(out) } type SourceZendeskSunshineAuthorizationMethodType string @@ -207,21 +111,21 @@ func CreateSourceZendeskSunshineAuthorizationMethodSourceZendeskSunshineAuthoriz func (u *SourceZendeskSunshineAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceZendeskSunshineAuthorizationMethodOAuth20 := new(SourceZendeskSunshineAuthorizationMethodOAuth20) + sourceZendeskSunshineAuthorizationMethodAPIToken := new(SourceZendeskSunshineAuthorizationMethodAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSunshineAuthorizationMethodOAuth20); err == nil { - u.SourceZendeskSunshineAuthorizationMethodOAuth20 = sourceZendeskSunshineAuthorizationMethodOAuth20 - u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodOAuth20 + if err := d.Decode(&sourceZendeskSunshineAuthorizationMethodAPIToken); err == nil { + u.SourceZendeskSunshineAuthorizationMethodAPIToken = sourceZendeskSunshineAuthorizationMethodAPIToken + u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodAPIToken return nil } - sourceZendeskSunshineAuthorizationMethodAPIToken := new(SourceZendeskSunshineAuthorizationMethodAPIToken) + sourceZendeskSunshineAuthorizationMethodOAuth20 := 
new(SourceZendeskSunshineAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSunshineAuthorizationMethodAPIToken); err == nil { - u.SourceZendeskSunshineAuthorizationMethodAPIToken = sourceZendeskSunshineAuthorizationMethodAPIToken - u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodAPIToken + if err := d.Decode(&sourceZendeskSunshineAuthorizationMethodOAuth20); err == nil { + u.SourceZendeskSunshineAuthorizationMethodOAuth20 = sourceZendeskSunshineAuthorizationMethodOAuth20 + u.Type = SourceZendeskSunshineAuthorizationMethodTypeSourceZendeskSunshineAuthorizationMethodOAuth20 return nil } @@ -229,14 +133,14 @@ func (u *SourceZendeskSunshineAuthorizationMethod) UnmarshalJSON(data []byte) er } func (u SourceZendeskSunshineAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceZendeskSunshineAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceZendeskSunshineAuthorizationMethodOAuth20) - } - if u.SourceZendeskSunshineAuthorizationMethodAPIToken != nil { return json.Marshal(u.SourceZendeskSunshineAuthorizationMethodAPIToken) } + if u.SourceZendeskSunshineAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceZendeskSunshineAuthorizationMethodOAuth20) + } + return nil, nil } @@ -268,7 +172,7 @@ type SourceZendeskSunshine struct { Credentials *SourceZendeskSunshineAuthorizationMethod `json:"credentials,omitempty"` SourceType SourceZendeskSunshineZendeskSunshine `json:"sourceType"` // The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z. - StartDate string `json:"start_date"` + StartDate time.Time `json:"start_date"` // The subdomain for your Zendesk Account. 
Subdomain string `json:"subdomain"` } diff --git a/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go b/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go index f54a7edcc..1866a08d2 100755 --- a/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go +++ b/internal/sdk/pkg/models/shared/sourcezendesksunshineupdate.go @@ -7,6 +7,7 @@ import ( "encoding/json" "errors" "fmt" + "time" ) type SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod string @@ -39,54 +40,6 @@ type SourceZendeskSunshineUpdateAuthorizationMethodAPIToken struct { AuthMethod SourceZendeskSunshineUpdateAuthorizationMethodAPITokenAuthMethod `json:"auth_method"` // The user email for your Zendesk account Email string `json:"email"` - - AdditionalProperties interface{} `json:"-"` -} -type _SourceZendeskSunshineUpdateAuthorizationMethodAPIToken SourceZendeskSunshineUpdateAuthorizationMethodAPIToken - -func (c *SourceZendeskSunshineUpdateAuthorizationMethodAPIToken) UnmarshalJSON(bs []byte) error { - data := _SourceZendeskSunshineUpdateAuthorizationMethodAPIToken{} - - if err := json.Unmarshal(bs, &data); err != nil { - return err - } - *c = SourceZendeskSunshineUpdateAuthorizationMethodAPIToken(data) - - additionalFields := make(map[string]interface{}) - - if err := json.Unmarshal(bs, &additionalFields); err != nil { - return err - } - delete(additionalFields, "api_token") - delete(additionalFields, "auth_method") - delete(additionalFields, "email") - - c.AdditionalProperties = additionalFields - - return nil -} - -func (c SourceZendeskSunshineUpdateAuthorizationMethodAPIToken) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{} - bs, err := json.Marshal(_SourceZendeskSunshineUpdateAuthorizationMethodAPIToken(c)) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - bs, err = json.Marshal(c.AdditionalProperties) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - return json.Marshal(out) } type SourceZendeskSunshineUpdateAuthorizationMethodOAuth20AuthMethod string @@ -121,55 +74,6 @@ type SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 struct { ClientID string `json:"client_id"` // The Client Secret of your OAuth application. 
ClientSecret string `json:"client_secret"` - - AdditionalProperties interface{} `json:"-"` -} -type _SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 - -func (c *SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) UnmarshalJSON(bs []byte) error { - data := _SourceZendeskSunshineUpdateAuthorizationMethodOAuth20{} - - if err := json.Unmarshal(bs, &data); err != nil { - return err - } - *c = SourceZendeskSunshineUpdateAuthorizationMethodOAuth20(data) - - additionalFields := make(map[string]interface{}) - - if err := json.Unmarshal(bs, &additionalFields); err != nil { - return err - } - delete(additionalFields, "access_token") - delete(additionalFields, "auth_method") - delete(additionalFields, "client_id") - delete(additionalFields, "client_secret") - - c.AdditionalProperties = additionalFields - - return nil -} - -func (c SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) MarshalJSON() ([]byte, error) { - out := map[string]interface{}{} - bs, err := json.Marshal(_SourceZendeskSunshineUpdateAuthorizationMethodOAuth20(c)) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - bs, err = json.Marshal(c.AdditionalProperties) - if err != nil { - return nil, err - } - - if err := json.Unmarshal([]byte(bs), &out); err != nil { - return nil, err - } - - return json.Marshal(out) } type SourceZendeskSunshineUpdateAuthorizationMethodType string @@ -207,21 +111,21 @@ func CreateSourceZendeskSunshineUpdateAuthorizationMethodSourceZendeskSunshineUp func (u *SourceZendeskSunshineUpdateAuthorizationMethod) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 := new(SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) + sourceZendeskSunshineUpdateAuthorizationMethodAPIToken := new(SourceZendeskSunshineUpdateAuthorizationMethodAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSunshineUpdateAuthorizationMethodOAuth20); err == nil { - u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 = sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 - u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodOAuth20 + if err := d.Decode(&sourceZendeskSunshineUpdateAuthorizationMethodAPIToken); err == nil { + u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken = sourceZendeskSunshineUpdateAuthorizationMethodAPIToken + u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodAPIToken return nil } - sourceZendeskSunshineUpdateAuthorizationMethodAPIToken := new(SourceZendeskSunshineUpdateAuthorizationMethodAPIToken) + sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 := new(SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSunshineUpdateAuthorizationMethodAPIToken); err == nil { - u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken = sourceZendeskSunshineUpdateAuthorizationMethodAPIToken - u.Type = SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodAPIToken + if err := d.Decode(&sourceZendeskSunshineUpdateAuthorizationMethodOAuth20); err == nil { + u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 = sourceZendeskSunshineUpdateAuthorizationMethodOAuth20 + u.Type = 
SourceZendeskSunshineUpdateAuthorizationMethodTypeSourceZendeskSunshineUpdateAuthorizationMethodOAuth20 return nil } @@ -229,21 +133,21 @@ func (u *SourceZendeskSunshineUpdateAuthorizationMethod) UnmarshalJSON(data []by } func (u SourceZendeskSunshineUpdateAuthorizationMethod) MarshalJSON() ([]byte, error) { - if u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 != nil { - return json.Marshal(u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) - } - if u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken != nil { return json.Marshal(u.SourceZendeskSunshineUpdateAuthorizationMethodAPIToken) } + if u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20 != nil { + return json.Marshal(u.SourceZendeskSunshineUpdateAuthorizationMethodOAuth20) + } + return nil, nil } type SourceZendeskSunshineUpdate struct { Credentials *SourceZendeskSunshineUpdateAuthorizationMethod `json:"credentials,omitempty"` // The date from which you'd like to replicate data for Zendesk Sunshine API, in the format YYYY-MM-DDT00:00:00Z. - StartDate string `json:"start_date"` + StartDate time.Time `json:"start_date"` // The subdomain for your Zendesk Account. Subdomain string `json:"subdomain"` } diff --git a/internal/sdk/pkg/models/shared/sourcezendesksupport.go b/internal/sdk/pkg/models/shared/sourcezendesksupport.go index 4d1a61baa..e6712241c 100755 --- a/internal/sdk/pkg/models/shared/sourcezendesksupport.go +++ b/internal/sdk/pkg/models/shared/sourcezendesksupport.go @@ -210,21 +210,21 @@ func CreateSourceZendeskSupportAuthenticationSourceZendeskSupportAuthenticationA func (u *SourceZendeskSupportAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceZendeskSupportAuthenticationOAuth20 := new(SourceZendeskSupportAuthenticationOAuth20) + sourceZendeskSupportAuthenticationAPIToken := new(SourceZendeskSupportAuthenticationAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSupportAuthenticationOAuth20); err == nil { - u.SourceZendeskSupportAuthenticationOAuth20 = sourceZendeskSupportAuthenticationOAuth20 - u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationOAuth20 + if err := d.Decode(&sourceZendeskSupportAuthenticationAPIToken); err == nil { + u.SourceZendeskSupportAuthenticationAPIToken = sourceZendeskSupportAuthenticationAPIToken + u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationAPIToken return nil } - sourceZendeskSupportAuthenticationAPIToken := new(SourceZendeskSupportAuthenticationAPIToken) + sourceZendeskSupportAuthenticationOAuth20 := new(SourceZendeskSupportAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSupportAuthenticationAPIToken); err == nil { - u.SourceZendeskSupportAuthenticationAPIToken = sourceZendeskSupportAuthenticationAPIToken - u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationAPIToken + if err := d.Decode(&sourceZendeskSupportAuthenticationOAuth20); err == nil { + u.SourceZendeskSupportAuthenticationOAuth20 = sourceZendeskSupportAuthenticationOAuth20 + u.Type = SourceZendeskSupportAuthenticationTypeSourceZendeskSupportAuthenticationOAuth20 return nil } @@ -232,14 +232,14 @@ func (u *SourceZendeskSupportAuthentication) UnmarshalJSON(data []byte) error { } func (u SourceZendeskSupportAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceZendeskSupportAuthenticationOAuth20 != nil { - return 
json.Marshal(u.SourceZendeskSupportAuthenticationOAuth20) - } - if u.SourceZendeskSupportAuthenticationAPIToken != nil { return json.Marshal(u.SourceZendeskSupportAuthenticationAPIToken) } + if u.SourceZendeskSupportAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceZendeskSupportAuthenticationOAuth20) + } + return nil, nil } @@ -274,7 +274,7 @@ type SourceZendeskSupport struct { IgnorePagination *bool `json:"ignore_pagination,omitempty"` SourceType SourceZendeskSupportZendeskSupport `json:"sourceType"` // The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. - StartDate time.Time `json:"start_date"` + StartDate *time.Time `json:"start_date,omitempty"` // This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain. Subdomain string `json:"subdomain"` } diff --git a/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go b/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go index 8883cedc0..0651cb035 100755 --- a/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go +++ b/internal/sdk/pkg/models/shared/sourcezendesksupportupdate.go @@ -210,21 +210,21 @@ func CreateSourceZendeskSupportUpdateAuthenticationSourceZendeskSupportUpdateAut func (u *SourceZendeskSupportUpdateAuthentication) UnmarshalJSON(data []byte) error { var d *json.Decoder - sourceZendeskSupportUpdateAuthenticationOAuth20 := new(SourceZendeskSupportUpdateAuthenticationOAuth20) + sourceZendeskSupportUpdateAuthenticationAPIToken := new(SourceZendeskSupportUpdateAuthenticationAPIToken) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSupportUpdateAuthenticationOAuth20); err == nil { - u.SourceZendeskSupportUpdateAuthenticationOAuth20 = sourceZendeskSupportUpdateAuthenticationOAuth20 - u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationOAuth20 + if err := d.Decode(&sourceZendeskSupportUpdateAuthenticationAPIToken); err == nil { + u.SourceZendeskSupportUpdateAuthenticationAPIToken = sourceZendeskSupportUpdateAuthenticationAPIToken + u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationAPIToken return nil } - sourceZendeskSupportUpdateAuthenticationAPIToken := new(SourceZendeskSupportUpdateAuthenticationAPIToken) + sourceZendeskSupportUpdateAuthenticationOAuth20 := new(SourceZendeskSupportUpdateAuthenticationOAuth20) d = json.NewDecoder(bytes.NewReader(data)) d.DisallowUnknownFields() - if err := d.Decode(&sourceZendeskSupportUpdateAuthenticationAPIToken); err == nil { - u.SourceZendeskSupportUpdateAuthenticationAPIToken = sourceZendeskSupportUpdateAuthenticationAPIToken - u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationAPIToken + if err := d.Decode(&sourceZendeskSupportUpdateAuthenticationOAuth20); err == nil { + u.SourceZendeskSupportUpdateAuthenticationOAuth20 = sourceZendeskSupportUpdateAuthenticationOAuth20 + u.Type = SourceZendeskSupportUpdateAuthenticationTypeSourceZendeskSupportUpdateAuthenticationOAuth20 return nil } @@ -232,14 +232,14 @@ func (u *SourceZendeskSupportUpdateAuthentication) UnmarshalJSON(data []byte) er } func (u SourceZendeskSupportUpdateAuthentication) MarshalJSON() ([]byte, error) { - if u.SourceZendeskSupportUpdateAuthenticationOAuth20 != nil { - return 
json.Marshal(u.SourceZendeskSupportUpdateAuthenticationOAuth20) - } - if u.SourceZendeskSupportUpdateAuthenticationAPIToken != nil { return json.Marshal(u.SourceZendeskSupportUpdateAuthenticationAPIToken) } + if u.SourceZendeskSupportUpdateAuthenticationOAuth20 != nil { + return json.Marshal(u.SourceZendeskSupportUpdateAuthenticationOAuth20) + } + return nil, nil } @@ -249,7 +249,7 @@ type SourceZendeskSupportUpdate struct { // Makes each stream read a single page of data. IgnorePagination *bool `json:"ignore_pagination,omitempty"` // The UTC date and time from which you'd like to replicate data, in the format YYYY-MM-DDT00:00:00Z. All data generated after this date will be replicated. - StartDate time.Time `json:"start_date"` + StartDate *time.Time `json:"start_date,omitempty"` // This is your unique Zendesk subdomain that can be found in your account URL. For example, in https://MY_SUBDOMAIN.zendesk.com/, MY_SUBDOMAIN is the value of your subdomain. Subdomain string `json:"subdomain"` } diff --git a/internal/sdk/pkg/models/shared/workspaceresponseroottypeforworkspaceresponse.go b/internal/sdk/pkg/models/shared/workspaceresponseroottypeforworkspaceresponse.go deleted file mode 100755 index eb834a617..000000000 --- a/internal/sdk/pkg/models/shared/workspaceresponseroottypeforworkspaceresponse.go +++ /dev/null @@ -1,10 +0,0 @@ -// Code generated by Speakeasy (https://speakeasyapi.dev). DO NOT EDIT. - -package shared - -// WorkspaceResponseRootTypeForWorkspaceResponse - Provides details of a single workspace. -type WorkspaceResponseRootTypeForWorkspaceResponse struct { - DataResidency GeographyEnum `json:"dataResidency"` - Name string `json:"name"` - WorkspaceID string `json:"workspaceId"` -} diff --git a/internal/sdk/pkg/models/shared/workspacesresponse.go b/internal/sdk/pkg/models/shared/workspacesresponse.go index 84e3d0842..b35d3295d 100755 --- a/internal/sdk/pkg/models/shared/workspacesresponse.go +++ b/internal/sdk/pkg/models/shared/workspacesresponse.go @@ -2,7 +2,6 @@ package shared -// WorkspacesResponse - Successful operation type WorkspacesResponse struct { Data []WorkspaceResponse `json:"data"` Next *string `json:"next,omitempty"` diff --git a/internal/sdk/pkg/utils/form.go b/internal/sdk/pkg/utils/form.go index bf962652f..3fea5fd07 100755 --- a/internal/sdk/pkg/utils/form.go +++ b/internal/sdk/pkg/utils/form.go @@ -17,10 +17,11 @@ func populateForm(paramName string, explode bool, objType reflect.Type, objValue formValues := url.Values{} + if isNil(objType, objValue) { + return formValues + } + if objType.Kind() == reflect.Pointer { - if objValue.IsNil() { - return formValues - } objType = objType.Elem() objValue = objValue.Elem() } @@ -43,11 +44,11 @@ func populateForm(paramName string, explode bool, objType reflect.Type, objValue fieldType := objType.Field(i) valType := objValue.Field(i) - if valType.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } + if isNil(fieldType.Type, valType) { + continue + } + if valType.Kind() == reflect.Pointer { valType = valType.Elem() } diff --git a/internal/sdk/pkg/utils/headers.go b/internal/sdk/pkg/utils/headers.go index d8ca9b181..0837022e2 100755 --- a/internal/sdk/pkg/utils/headers.go +++ b/internal/sdk/pkg/utils/headers.go @@ -31,10 +31,11 @@ func PopulateHeaders(ctx context.Context, req *http.Request, headers interface{} } func serializeHeader(objType reflect.Type, objValue reflect.Value, explode bool) string { + if isNil(objType, objValue) { + return "" + } + if objType.Kind() == reflect.Pointer { - if objValue.IsNil() { 
- return "" - } objType = objType.Elem() objValue = objValue.Elem() } @@ -47,10 +48,11 @@ func serializeHeader(objType reflect.Type, objValue reflect.Value, explode bool) fieldType := objType.Field(i) valType := objValue.Field(i) + if isNil(fieldType.Type, valType) { + continue + } + if fieldType.Type.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } valType = valType.Elem() } diff --git a/internal/sdk/pkg/utils/pathparams.go b/internal/sdk/pkg/utils/pathparams.go index c2fd7281e..1e0dcac84 100755 --- a/internal/sdk/pkg/utils/pathparams.go +++ b/internal/sdk/pkg/utils/pathparams.go @@ -61,10 +61,11 @@ func GenerateURL(ctx context.Context, serverURL, path string, pathParams interfa func getSimplePathParams(ctx context.Context, parentName string, objType reflect.Type, objValue reflect.Value, explode bool) map[string]string { pathParams := make(map[string]string) + if isNil(objType, objValue) { + return nil + } + if objType.Kind() == reflect.Ptr { - if objValue.IsNil() { - return nil - } objType = objType.Elem() objValue = objValue.Elem() } @@ -104,10 +105,11 @@ func getSimplePathParams(ctx context.Context, parentName string, objType reflect continue } + if isNil(fieldType.Type, valType) { + continue + } + if fieldType.Type.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } valType = valType.Elem() } diff --git a/internal/sdk/pkg/utils/queryparams.go b/internal/sdk/pkg/utils/queryparams.go index 87ecef882..0e94bcee4 100755 --- a/internal/sdk/pkg/utils/queryparams.go +++ b/internal/sdk/pkg/utils/queryparams.go @@ -76,15 +76,13 @@ func PopulateQueryParams(ctx context.Context, req *http.Request, queryParams int } func populateSerializedParams(tag *paramTag, objType reflect.Type, objValue reflect.Value) (map[string]string, error) { + if isNil(objType, objValue) { + return nil, nil + } + if objType.Kind() == reflect.Pointer { - if objValue.IsNil() { - return nil, nil - } objValue = objValue.Elem() } - if objValue.Interface() == nil { - return nil, nil - } values := map[string]string{} @@ -103,10 +101,11 @@ func populateSerializedParams(tag *paramTag, objType reflect.Type, objValue refl func populateDeepObjectParams(req *http.Request, tag *paramTag, objType reflect.Type, objValue reflect.Value) url.Values { values := url.Values{} + if isNil(objType, objValue) { + return values + } + if objType.Kind() == reflect.Pointer { - if objValue.IsNil() { - return values - } objType = objType.Elem() objValue = objValue.Elem() } @@ -117,10 +116,11 @@ func populateDeepObjectParams(req *http.Request, tag *paramTag, objType reflect. 
fieldType := objType.Field(i) valType := objValue.Field(i) + if isNil(fieldType.Type, valType) { + continue + } + if fieldType.Type.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } valType = valType.Elem() } diff --git a/internal/sdk/pkg/utils/requestbody.go b/internal/sdk/pkg/utils/requestbody.go index 5c1d2638c..d9fcbdaab 100755 --- a/internal/sdk/pkg/utils/requestbody.go +++ b/internal/sdk/pkg/utils/requestbody.go @@ -26,15 +26,15 @@ var ( urlEncodedEncodingRegex = regexp.MustCompile(`application\/x-www-form-urlencoded.*`) ) -func SerializeRequestBody(ctx context.Context, request interface{}, requestFieldName string, serializationMethod string) (*bytes.Buffer, string, error) { +func SerializeRequestBody(ctx context.Context, request interface{}, requestFieldName string, serializationMethod string) (io.Reader, string, error) { requestStructType := reflect.TypeOf(request) requestValType := reflect.ValueOf(request) - if requestStructType.Kind() == reflect.Pointer { - if requestValType.IsNil() { - return nil, "", nil - } + if isNil(requestStructType, requestValType) { + return nil, "", nil + } + if requestStructType.Kind() == reflect.Pointer { requestStructType = requestStructType.Elem() requestValType = requestValType.Elem() } @@ -50,7 +50,7 @@ func SerializeRequestBody(ctx context.Context, request interface{}, requestField if tag != nil { // request object (non-flattened) requestVal := requestValType.FieldByName(requestFieldName) - if requestField.Type.Kind() == reflect.Pointer && requestVal.IsNil() { + if isNil(requestField.Type, requestVal) { return nil, "", nil } @@ -116,11 +116,11 @@ func encodeMultipartFormData(w io.Writer, data interface{}) (string, error) { fieldType := field.Type valType := requestValType.Field(i) - if fieldType.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } + if isNil(fieldType, valType) { + continue + } + if fieldType.Kind() == reflect.Pointer { fieldType = fieldType.Elem() valType = valType.Elem() } @@ -231,11 +231,11 @@ func encodeFormData(fieldName string, w io.Writer, data interface{}) error { fieldType := field.Type valType := requestValType.Field(i) - if fieldType.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } + if isNil(fieldType, valType) { + continue + } + if fieldType.Kind() == reflect.Pointer { fieldType = fieldType.Elem() valType = valType.Elem() } diff --git a/internal/sdk/pkg/utils/security.go b/internal/sdk/pkg/utils/security.go index ec837d7fc..675cd5758 100755 --- a/internal/sdk/pkg/utils/security.go +++ b/internal/sdk/pkg/utils/security.go @@ -69,11 +69,11 @@ func parseSecurityStruct(c HTTPClient, security interface{}) *SecurityClient { securityStructType := reflect.TypeOf(security) securityValType := reflect.ValueOf(security) - if securityStructType.Kind() == reflect.Ptr { - if securityValType.IsNil() { - return nil - } + if isNil(securityStructType, securityValType) { + return nil + } + if securityStructType.Kind() == reflect.Ptr { securityStructType = securityStructType.Elem() securityValType = securityValType.Elem() } @@ -86,11 +86,11 @@ func parseSecurityStruct(c HTTPClient, security interface{}) *SecurityClient { kind := valType.Kind() - if fieldType.Type.Kind() == reflect.Pointer { - if valType.IsNil() { - continue - } + if isNil(fieldType.Type, valType) { + continue + } + if fieldType.Type.Kind() == reflect.Pointer { kind = valType.Elem().Kind() } @@ -117,11 +117,11 @@ func parseSecurityOption(c HTTPClient, option interface{}) *SecurityClient { optionStructType := 
reflect.TypeOf(option) optionValType := reflect.ValueOf(option) - if optionStructType.Kind() == reflect.Ptr { - if optionValType.IsNil() { - return nil - } + if isNil(optionStructType, optionValType) { + return nil + } + if optionStructType.Kind() == reflect.Ptr { optionStructType = optionStructType.Elem() optionValType = optionValType.Elem() } @@ -145,11 +145,11 @@ func parseSecurityScheme(client *SecurityClient, schemeTag *securityTag, scheme schemeType := reflect.TypeOf(scheme) schemeVal := reflect.ValueOf(scheme) - if schemeType.Kind() == reflect.Ptr { - if schemeVal.IsNil() { - return - } + if isNil(schemeType, schemeVal) { + return + } + if schemeType.Kind() == reflect.Ptr { schemeType = schemeType.Elem() schemeVal = schemeVal.Elem() } @@ -164,11 +164,11 @@ func parseSecurityScheme(client *SecurityClient, schemeTag *securityTag, scheme fieldType := schemeType.Field(i) valType := schemeVal.Field(i) - if fieldType.Type.Kind() == reflect.Ptr { - if valType.IsNil() { - continue - } + if isNil(fieldType.Type, valType) { + continue + } + if fieldType.Type.Kind() == reflect.Ptr { valType = valType.Elem() } diff --git a/internal/sdk/pkg/utils/utils.go b/internal/sdk/pkg/utils/utils.go index 5f50f1695..7accc1699 100755 --- a/internal/sdk/pkg/utils/utils.go +++ b/internal/sdk/pkg/utils/utils.go @@ -45,6 +45,10 @@ func UnmarshalJsonFromResponseBody(body io.Reader, out interface{}) error { } func ReplaceParameters(stringWithParams string, params map[string]string) string { + if len(params) == 0 { + return stringWithParams + } + return paramRegex.ReplaceAllStringFunc(stringWithParams, func(match string) string { match = match[1 : len(match)-1] return params[match] @@ -150,3 +154,11 @@ func populateFromGlobals(fieldType reflect.StructField, valType reflect.Value, p return valType } + +func isNil(typ reflect.Type, val reflect.Value) bool { + if typ.Kind() == reflect.Ptr || typ.Kind() == reflect.Map || typ.Kind() == reflect.Slice || typ.Kind() == reflect.Interface { + return val.IsNil() + } + + return false +} diff --git a/internal/sdk/sdk.go b/internal/sdk/sdk.go index 86410d614..3f1332596 100755 --- a/internal/sdk/sdk.go +++ b/internal/sdk/sdk.go @@ -122,8 +122,8 @@ func New(opts ...SDKOption) *SDK { sdkConfiguration: sdkConfiguration{ Language: "terraform", OpenAPIDocVersion: "1.0.0", - SDKVersion: "0.3.3", - GenVersion: "2.86.10", + SDKVersion: "0.3.4", + GenVersion: "2.108.3", }, } for _, opt := range opts { diff --git a/internal/sdk/sources.go b/internal/sdk/sources.go index bbce4346f..632fff7ce 100755 --- a/internal/sdk/sources.go +++ b/internal/sdk/sources.go @@ -2302,11 +2302,11 @@ func (s *sources) CreateSourceConvex(ctx context.Context, request shared.SourceC return res, nil } -// CreateSourceDatadog - Create a source +// CreateSourceDatascope - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceDatadog(ctx context.Context, request shared.SourceDatadogCreateRequest) (*operations.CreateSourceDatadogResponse, error) { +func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.SourceDatascopeCreateRequest) (*operations.CreateSourceDatascopeResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Datadog" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Datascope" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2345,7 +2345,7 @@ func (s *sources) CreateSourceDatadog(ctx context.Context, request shared.Source contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDatadogResponse{ + res := &operations.CreateSourceDatascopeResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2369,11 +2369,11 @@ func (s *sources) CreateSourceDatadog(ctx context.Context, request shared.Source return res, nil } -// CreateSourceDatascope - Create a source +// CreateSourceDelighted - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.SourceDatascopeCreateRequest) (*operations.CreateSourceDatascopeResponse, error) { +func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.SourceDelightedCreateRequest) (*operations.CreateSourceDelightedResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Datascope" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Delighted" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2412,7 +2412,7 @@ func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.Sour contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDatascopeResponse{ + res := &operations.CreateSourceDelightedResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2436,11 +2436,11 @@ func (s *sources) CreateSourceDatascope(ctx context.Context, request shared.Sour return res, nil } -// CreateSourceDelighted - Create a source +// CreateSourceDixa - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.SourceDelightedCreateRequest) (*operations.CreateSourceDelightedResponse, error) { +func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDixaCreateRequest) (*operations.CreateSourceDixaResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Delighted" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Dixa" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2479,7 +2479,7 @@ func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.Sour contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDelightedResponse{ + res := &operations.CreateSourceDixaResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2503,11 +2503,11 @@ func (s *sources) CreateSourceDelighted(ctx context.Context, request shared.Sour return res, nil } -// CreateSourceDixa - Create a source +// CreateSourceDockerhub - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDixaCreateRequest) (*operations.CreateSourceDixaResponse, error) { +func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.SourceDockerhubCreateRequest) (*operations.CreateSourceDockerhubResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Dixa" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Dockerhub" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2546,7 +2546,7 @@ func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDix contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDixaResponse{ + res := &operations.CreateSourceDockerhubResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2570,11 +2570,11 @@ func (s *sources) CreateSourceDixa(ctx context.Context, request shared.SourceDix return res, nil } -// CreateSourceDockerhub - Create a source +// CreateSourceDremio - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.SourceDockerhubCreateRequest) (*operations.CreateSourceDockerhubResponse, error) { +func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceDremioCreateRequest) (*operations.CreateSourceDremioResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Dockerhub" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Dremio" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2613,7 +2613,7 @@ func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.Sour contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDockerhubResponse{ + res := &operations.CreateSourceDremioResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2637,11 +2637,11 @@ func (s *sources) CreateSourceDockerhub(ctx context.Context, request shared.Sour return res, nil } -// CreateSourceDremio - Create a source +// CreateSourceDynamodb - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceDremioCreateRequest) (*operations.CreateSourceDremioResponse, error) { +func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.SourceDynamodbCreateRequest) (*operations.CreateSourceDynamodbResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Dremio" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Dynamodb" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2680,7 +2680,7 @@ func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceD contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDremioResponse{ + res := &operations.CreateSourceDynamodbResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2704,11 +2704,11 @@ func (s *sources) CreateSourceDremio(ctx context.Context, request shared.SourceD return res, nil } -// CreateSourceDynamodb - Create a source +// CreateSourceE2eTestCloud - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.SourceDynamodbCreateRequest) (*operations.CreateSourceDynamodbResponse, error) { +func (s *sources) CreateSourceE2eTestCloud(ctx context.Context, request shared.SourceE2eTestCloudCreateRequest) (*operations.CreateSourceE2eTestCloudResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Dynamodb" + url := strings.TrimSuffix(baseURL, "/") + "/sources#E2eTestCloud" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2747,7 +2747,7 @@ func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.Sourc contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceDynamodbResponse{ + res := &operations.CreateSourceE2eTestCloudResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2771,11 +2771,11 @@ func (s *sources) CreateSourceDynamodb(ctx context.Context, request shared.Sourc return res, nil } -// CreateSourceE2eTestCloud - Create a source +// CreateSourceEmailoctopus - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceE2eTestCloud(ctx context.Context, request shared.SourceE2eTestCloudCreateRequest) (*operations.CreateSourceE2eTestCloudResponse, error) { +func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.SourceEmailoctopusCreateRequest) (*operations.CreateSourceEmailoctopusResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#E2eTestCloud" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Emailoctopus" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2814,7 +2814,7 @@ func (s *sources) CreateSourceE2eTestCloud(ctx context.Context, request shared.S contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceE2eTestCloudResponse{ + res := &operations.CreateSourceEmailoctopusResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2838,11 +2838,11 @@ func (s *sources) CreateSourceE2eTestCloud(ctx context.Context, request shared.S return res, nil } -// CreateSourceEmailoctopus - Create a source +// CreateSourceExchangeRates - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.SourceEmailoctopusCreateRequest) (*operations.CreateSourceEmailoctopusResponse, error) { +func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared.SourceExchangeRatesCreateRequest) (*operations.CreateSourceExchangeRatesResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Emailoctopus" + url := strings.TrimSuffix(baseURL, "/") + "/sources#ExchangeRates" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2881,7 +2881,7 @@ func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.S contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceEmailoctopusResponse{ + res := &operations.CreateSourceExchangeRatesResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2905,11 +2905,11 @@ func (s *sources) CreateSourceEmailoctopus(ctx context.Context, request shared.S return res, nil } -// CreateSourceExchangeRates - Create a source +// CreateSourceFacebookMarketing - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared.SourceExchangeRatesCreateRequest) (*operations.CreateSourceExchangeRatesResponse, error) { +func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request shared.SourceFacebookMarketingCreateRequest) (*operations.CreateSourceFacebookMarketingResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#ExchangeRates" + url := strings.TrimSuffix(baseURL, "/") + "/sources#FacebookMarketing" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -2948,7 +2948,7 @@ func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared. contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceExchangeRatesResponse{ + res := &operations.CreateSourceFacebookMarketingResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -2972,11 +2972,11 @@ func (s *sources) CreateSourceExchangeRates(ctx context.Context, request shared. return res, nil } -// CreateSourceFacebookMarketing - Create a source +// CreateSourceFacebookPages - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request shared.SourceFacebookMarketingCreateRequest) (*operations.CreateSourceFacebookMarketingResponse, error) { +func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared.SourceFacebookPagesCreateRequest) (*operations.CreateSourceFacebookPagesResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#FacebookMarketing" + url := strings.TrimSuffix(baseURL, "/") + "/sources#FacebookPages" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -3015,7 +3015,7 @@ func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request sha contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceFacebookMarketingResponse{ + res := &operations.CreateSourceFacebookPagesResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -3039,11 +3039,11 @@ func (s *sources) CreateSourceFacebookMarketing(ctx context.Context, request sha return res, nil } -// CreateSourceFacebookPages - Create a source +// CreateSourceFaker - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared.SourceFacebookPagesCreateRequest) (*operations.CreateSourceFacebookPagesResponse, error) { +func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFakerCreateRequest) (*operations.CreateSourceFakerResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#FacebookPages" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Faker" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -3082,7 +3082,7 @@ func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared. contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceFacebookPagesResponse{ + res := &operations.CreateSourceFakerResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -3106,11 +3106,11 @@ func (s *sources) CreateSourceFacebookPages(ctx context.Context, request shared. return res, nil } -// CreateSourceFaker - Create a source +// CreateSourceFauna - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFakerCreateRequest) (*operations.CreateSourceFakerResponse, error) { +func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFaunaCreateRequest) (*operations.CreateSourceFaunaResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Faker" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Fauna" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -3149,7 +3149,7 @@ func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFa contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceFakerResponse{ + res := &operations.CreateSourceFaunaResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -3173,11 +3173,11 @@ func (s *sources) CreateSourceFaker(ctx context.Context, request shared.SourceFa return res, nil } -// CreateSourceFauna - Create a source +// CreateSourceFileSecure - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFaunaCreateRequest) (*operations.CreateSourceFaunaResponse, error) { +func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.SourceFileSecureCreateRequest) (*operations.CreateSourceFileSecureResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Fauna" + url := strings.TrimSuffix(baseURL, "/") + "/sources#FileSecure" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -3216,7 +3216,7 @@ func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFa contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceFaunaResponse{ + res := &operations.CreateSourceFileSecureResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -3240,11 +3240,11 @@ func (s *sources) CreateSourceFauna(ctx context.Context, request shared.SourceFa return res, nil } -// CreateSourceFileSecure - Create a source +// CreateSourceFirebolt - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.SourceFileSecureCreateRequest) (*operations.CreateSourceFileSecureResponse, error) { +func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.SourceFireboltCreateRequest) (*operations.CreateSourceFireboltResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#FileSecure" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Firebolt" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -3283,7 +3283,7 @@ func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.Sou contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceFileSecureResponse{ + res := &operations.CreateSourceFireboltResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -3307,11 +3307,11 @@ func (s *sources) CreateSourceFileSecure(ctx context.Context, request shared.Sou return res, nil } -// CreateSourceFirebolt - Create a source +// CreateSourceFreshcaller - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. -func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.SourceFireboltCreateRequest) (*operations.CreateSourceFireboltResponse, error) { +func (s *sources) CreateSourceFreshcaller(ctx context.Context, request shared.SourceFreshcallerCreateRequest) (*operations.CreateSourceFreshcallerResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Firebolt" + url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshcaller" bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") if err != nil { @@ -3350,208 +3350,141 @@ func (s *sources) CreateSourceFirebolt(ctx context.Context, request shared.Sourc contentType := httpRes.Header.Get("Content-Type") - res := &operations.CreateSourceFireboltResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 400: - fallthrough - case httpRes.StatusCode == 403: - } - - return res, nil -} - -// CreateSourceFreshcaller - Create a source -// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceFreshcaller(ctx context.Context, request shared.SourceFreshcallerCreateRequest) (*operations.CreateSourceFreshcallerResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshcaller" - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.CreateSourceFreshcallerResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 400: - fallthrough - case httpRes.StatusCode == 403: - } - - return res, nil -} - -// CreateSourceFreshdesk - Create a source -// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceFreshdesk(ctx context.Context, request shared.SourceFreshdeskCreateRequest) (*operations.CreateSourceFreshdeskResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshdesk" - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.CreateSourceFreshdeskResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 400: - fallthrough - case httpRes.StatusCode == 403: - } - - return res, nil -} - -// CreateSourceFreshsales - Create a source -// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceFreshsales(ctx context.Context, request shared.SourceFreshsalesCreateRequest) (*operations.CreateSourceFreshsalesResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshsales" - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.CreateSourceFreshsalesResponse{ + res := &operations.CreateSourceFreshcallerResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(contentType, `application/json`): + var out *shared.SourceResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { + return res, err + } + + res.SourceResponse = out + } + case httpRes.StatusCode == 400: + fallthrough + case httpRes.StatusCode == 403: + } + + return res, nil +} + +// CreateSourceFreshdesk - Create a source +// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
+func (s *sources) CreateSourceFreshdesk(ctx context.Context, request shared.SourceFreshdeskCreateRequest) (*operations.CreateSourceFreshdeskResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshdesk" + + bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") + if err != nil { + return nil, fmt.Errorf("error serializing request body: %w", err) + } + + debugBody := bytes.NewBuffer([]byte{}) + debugReader := io.TeeReader(bodyReader, debugBody) + + req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + req.Header.Set("Content-Type", reqContentType) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Request.Body = io.NopCloser(debugBody) + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.CreateSourceFreshdeskResponse{ + StatusCode: httpRes.StatusCode, + ContentType: contentType, + RawResponse: httpRes, + } + switch { + case httpRes.StatusCode == 200: + switch { + case utils.MatchContentType(contentType, `application/json`): + var out *shared.SourceResponse + if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { + return res, err + } + + res.SourceResponse = out + } + case httpRes.StatusCode == 400: + fallthrough + case httpRes.StatusCode == 403: + } + + return res, nil +} + +// CreateSourceFreshsales - Create a source +// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
+func (s *sources) CreateSourceFreshsales(ctx context.Context, request shared.SourceFreshsalesCreateRequest) (*operations.CreateSourceFreshsalesResponse, error) { + baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) + url := strings.TrimSuffix(baseURL, "/") + "/sources#Freshsales" + + bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") + if err != nil { + return nil, fmt.Errorf("error serializing request body: %w", err) + } + + debugBody := bytes.NewBuffer([]byte{}) + debugReader := io.TeeReader(bodyReader, debugBody) + + req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) + if err != nil { + return nil, fmt.Errorf("error creating request: %w", err) + } + req.Header.Set("Accept", "application/json") + req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) + + req.Header.Set("Content-Type", reqContentType) + + client := s.sdkConfiguration.SecurityClient + + httpRes, err := client.Do(req) + if err != nil { + return nil, fmt.Errorf("error sending request: %w", err) + } + if httpRes == nil { + return nil, fmt.Errorf("error sending request: no response") + } + + rawBody, err := io.ReadAll(httpRes.Body) + if err != nil { + return nil, fmt.Errorf("error reading response body: %w", err) + } + httpRes.Request.Body = io.NopCloser(debugBody) + httpRes.Body.Close() + httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) + + contentType := httpRes.Header.Get("Content-Type") + + res := &operations.CreateSourceFreshsalesResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -7528,73 +7461,6 @@ func (s *sources) CreateSourceOnesignal(ctx context.Context, request shared.Sour return res, nil } -// CreateSourceOpenweather - Create a source -// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourceOpenweather(ctx context.Context, request shared.SourceOpenweatherCreateRequest) (*operations.CreateSourceOpenweatherResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#Openweather" - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.CreateSourceOpenweatherResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 400: - fallthrough - case httpRes.StatusCode == 403: - } - - return res, nil -} - // CreateSourceOracle - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. func (s *sources) CreateSourceOracle(ctx context.Context, request shared.SourceOracleCreateRequest) (*operations.CreateSourceOracleResponse, error) { @@ -8868,73 +8734,6 @@ func (s *sources) CreateSourcePrestashop(ctx context.Context, request shared.Sou return res, nil } -// CreateSourcePublicApis - Create a source -// Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
-func (s *sources) CreateSourcePublicApis(ctx context.Context, request shared.SourcePublicApisCreateRequest) (*operations.CreateSourcePublicApisResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url := strings.TrimSuffix(baseURL, "/") + "/sources#PublicApis" - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "Request", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "POST", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.CreateSourcePublicApisResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 400: - fallthrough - case httpRes.StatusCode == 403: - } - - return res, nil -} - // CreateSourcePunkAPI - Create a source // Creates a source given a name, workspace id, and a json blob containing the configuration for the source. 
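The Create and Get methods that return a payload share the decoding branch visible in the hunks above: read and buffer the raw body, restore it with a NopCloser so the raw response stays readable, match the Content-Type, and unmarshal the 200 body. Below is a self-contained sketch of that branch; the sourceResponse struct and its fields are stand-ins, not the SDK's shared.SourceResponse, whose exact fields this patch does not show.

// Sketch of the response-decoding branch used by the generated Create/Get methods.
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"mime"
	"net/http"
)

// sourceResponse is a placeholder for the SDK's decoded payload type.
type sourceResponse struct {
	SourceID string `json:"sourceId"`
	Name     string `json:"name"`
}

func decodeSource(httpRes *http.Response) (*sourceResponse, error) {
	rawBody, err := io.ReadAll(httpRes.Body)
	if err != nil {
		return nil, fmt.Errorf("error reading response body: %w", err)
	}
	httpRes.Body.Close()
	// Restore the body so callers holding the raw response can still read it.
	httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))

	mediaType, _, err := mime.ParseMediaType(httpRes.Header.Get("Content-Type"))
	if err != nil {
		return nil, err
	}

	if httpRes.StatusCode == http.StatusOK && mediaType == "application/json" {
		var out sourceResponse
		if err := json.Unmarshal(rawBody, &out); err != nil {
			return nil, err
		}
		return &out, nil
	}
	return nil, fmt.Errorf("unexpected response: status %d, content type %q", httpRes.StatusCode, mediaType)
}

func main() {
	// Exercise the helper with a synthetic response instead of a live API call.
	body := `{"sourceId":"abc-123","name":"demo"}`
	res := &http.Response{
		StatusCode: http.StatusOK,
		Header:     http.Header{"Content-Type": []string{"application/json"}},
		Body:       io.NopCloser(bytes.NewBufferString(body)),
	}
	out, err := decodeSource(res)
	if err != nil {
		panic(err)
	}
	fmt.Println(out.SourceID)
}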
func (s *sources) CreateSourcePunkAPI(ctx context.Context, request shared.SourcePunkAPICreateRequest) (*operations.CreateSourcePunkAPIResponse, error) { @@ -14959,57 +14758,7 @@ func (s *sources) DeleteSourceClockify(ctx context.Context, request operations.D contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceClockifyResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: - fallthrough - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - -// DeleteSourceCloseCom - Delete a Source -func (s *sources) DeleteSourceCloseCom(ctx context.Context, request operations.DeleteSourceCloseComRequest) (*operations.DeleteSourceCloseComResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CloseCom", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "*/*") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.DeleteSourceCloseComResponse{ + res := &operations.DeleteSourceClockifyResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15025,10 +14774,10 @@ func (s *sources) DeleteSourceCloseCom(ctx context.Context, request operations.D return res, nil } -// DeleteSourceCoda - Delete a Source -func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.DeleteSourceCodaRequest) (*operations.DeleteSourceCodaResponse, error) { +// DeleteSourceCloseCom - Delete a Source +func (s *sources) DeleteSourceCloseCom(ctx context.Context, request operations.DeleteSourceCloseComRequest) (*operations.DeleteSourceCloseComResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coda", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CloseCom", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15059,7 +14808,7 @@ func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.Delet contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceCodaResponse{ + res := &operations.DeleteSourceCloseComResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15075,10 +14824,10 @@ func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.Delet return res, nil } -// DeleteSourceCoinAPI - Delete a Source -func (s *sources) 
DeleteSourceCoinAPI(ctx context.Context, request operations.DeleteSourceCoinAPIRequest) (*operations.DeleteSourceCoinAPIResponse, error) { +// DeleteSourceCoda - Delete a Source +func (s *sources) DeleteSourceCoda(ctx context.Context, request operations.DeleteSourceCodaRequest) (*operations.DeleteSourceCodaResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CoinApi", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coda", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15109,7 +14858,7 @@ func (s *sources) DeleteSourceCoinAPI(ctx context.Context, request operations.De contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceCoinAPIResponse{ + res := &operations.DeleteSourceCodaResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15125,10 +14874,10 @@ func (s *sources) DeleteSourceCoinAPI(ctx context.Context, request operations.De return res, nil } -// DeleteSourceCoinmarketcap - Delete a Source -func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operations.DeleteSourceCoinmarketcapRequest) (*operations.DeleteSourceCoinmarketcapResponse, error) { +// DeleteSourceCoinAPI - Delete a Source +func (s *sources) DeleteSourceCoinAPI(ctx context.Context, request operations.DeleteSourceCoinAPIRequest) (*operations.DeleteSourceCoinAPIResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coinmarketcap", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#CoinApi", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15159,7 +14908,7 @@ func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operati contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceCoinmarketcapResponse{ + res := &operations.DeleteSourceCoinAPIResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15175,10 +14924,10 @@ func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operati return res, nil } -// DeleteSourceConfigcat - Delete a Source -func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations.DeleteSourceConfigcatRequest) (*operations.DeleteSourceConfigcatResponse, error) { +// DeleteSourceCoinmarketcap - Delete a Source +func (s *sources) DeleteSourceCoinmarketcap(ctx context.Context, request operations.DeleteSourceCoinmarketcapRequest) (*operations.DeleteSourceCoinmarketcapResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Configcat", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Coinmarketcap", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15209,7 +14958,7 @@ func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations. 
contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceConfigcatResponse{ + res := &operations.DeleteSourceCoinmarketcapResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15225,10 +14974,10 @@ func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations. return res, nil } -// DeleteSourceConfluence - Delete a Source -func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations.DeleteSourceConfluenceRequest) (*operations.DeleteSourceConfluenceResponse, error) { +// DeleteSourceConfigcat - Delete a Source +func (s *sources) DeleteSourceConfigcat(ctx context.Context, request operations.DeleteSourceConfigcatRequest) (*operations.DeleteSourceConfigcatResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Confluence", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Configcat", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15259,7 +15008,7 @@ func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceConfluenceResponse{ + res := &operations.DeleteSourceConfigcatResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15275,10 +15024,10 @@ func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations return res, nil } -// DeleteSourceConvex - Delete a Source -func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.DeleteSourceConvexRequest) (*operations.DeleteSourceConvexResponse, error) { +// DeleteSourceConfluence - Delete a Source +func (s *sources) DeleteSourceConfluence(ctx context.Context, request operations.DeleteSourceConfluenceRequest) (*operations.DeleteSourceConfluenceResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Convex", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Confluence", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15309,7 +15058,7 @@ func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.Del contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceConvexResponse{ + res := &operations.DeleteSourceConfluenceResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -15325,10 +15074,10 @@ func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.Del return res, nil } -// DeleteSourceDatadog - Delete a Source -func (s *sources) DeleteSourceDatadog(ctx context.Context, request operations.DeleteSourceDatadogRequest) (*operations.DeleteSourceDatadogResponse, error) { +// DeleteSourceConvex - Delete a Source +func (s *sources) DeleteSourceConvex(ctx context.Context, request operations.DeleteSourceConvexRequest) (*operations.DeleteSourceConvexResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Datadog", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Convex", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -15359,7 +15108,7 @@ func (s *sources) 
DeleteSourceDatadog(ctx context.Context, request operations.De contentType := httpRes.Header.Get("Content-Type") - res := &operations.DeleteSourceDatadogResponse{ + res := &operations.DeleteSourceConvexResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -19225,56 +18974,6 @@ func (s *sources) DeleteSourceOnesignal(ctx context.Context, request operations. return res, nil } -// DeleteSourceOpenweather - Delete a Source -func (s *sources) DeleteSourceOpenweather(ctx context.Context, request operations.DeleteSourceOpenweatherRequest) (*operations.DeleteSourceOpenweatherResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Openweather", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "*/*") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.DeleteSourceOpenweatherResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: - fallthrough - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // DeleteSourceOracle - Delete a Source func (s *sources) DeleteSourceOracle(ctx context.Context, request operations.DeleteSourceOracleRequest) (*operations.DeleteSourceOracleResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -20225,56 +19924,6 @@ func (s *sources) DeleteSourcePrestashop(ctx context.Context, request operations return res, nil } -// DeleteSourcePublicApis - Delete a Source -func (s *sources) DeleteSourcePublicApis(ctx context.Context, request operations.DeleteSourcePublicApisRequest) (*operations.DeleteSourcePublicApisResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PublicApis", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - req, err := http.NewRequestWithContext(ctx, "DELETE", url, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "*/*") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil 
{ - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.DeleteSourcePublicApisResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: - fallthrough - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // DeleteSourcePunkAPI - Delete a Source func (s *sources) DeleteSourcePunkAPI(ctx context.Context, request operations.DeleteSourcePunkAPIRequest) (*operations.DeleteSourcePunkAPIResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -25747,64 +25396,6 @@ func (s *sources) GetSourceConvex(ctx context.Context, request operations.GetSou return res, nil } -// GetSourceDatadog - Get Source details -func (s *sources) GetSourceDatadog(ctx context.Context, request operations.GetSourceDatadogRequest) (*operations.GetSourceDatadogResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Datadog", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.GetSourceDatadogResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // GetSourceDatascope - Get Source details func (s *sources) GetSourceDatascope(ctx context.Context, request operations.GetSourceDatascopeRequest) (*operations.GetSourceDatascopeResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -29981,68 +29572,10 @@ func (s *sources) GetSourceNetsuite(ctx context.Context, request operations.GetS return res, nil } -// GetSourceNotion - Get Source details -func (s *sources) GetSourceNotion(ctx context.Context, request operations.GetSourceNotionRequest) (*operations.GetSourceNotionResponse, 
error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Notion", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.GetSourceNotionResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - -// GetSourceNytimes - Get Source details -func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSourceNytimesRequest) (*operations.GetSourceNytimesResponse, error) { +// GetSourceNotion - Get Source details +func (s *sources) GetSourceNotion(ctx context.Context, request operations.GetSourceNotionRequest) (*operations.GetSourceNotionResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Nytimes", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Notion", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -30073,7 +29606,7 @@ func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSo contentType := httpRes.Header.Get("Content-Type") - res := &operations.GetSourceNytimesResponse{ + res := &operations.GetSourceNotionResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -30097,10 +29630,10 @@ func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSo return res, nil } -// GetSourceOkta - Get Source details -func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourceOktaRequest) (*operations.GetSourceOktaResponse, error) { +// GetSourceNytimes - Get Source details +func (s *sources) GetSourceNytimes(ctx context.Context, request operations.GetSourceNytimesRequest) (*operations.GetSourceNytimesResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Okta", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Nytimes", request, nil) if err != nil { return nil, fmt.Errorf("error 
generating URL: %w", err) } @@ -30131,7 +29664,7 @@ func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourc contentType := httpRes.Header.Get("Content-Type") - res := &operations.GetSourceOktaResponse{ + res := &operations.GetSourceNytimesResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -30155,10 +29688,10 @@ func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourc return res, nil } -// GetSourceOmnisend - Get Source details -func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetSourceOmnisendRequest) (*operations.GetSourceOmnisendResponse, error) { +// GetSourceOkta - Get Source details +func (s *sources) GetSourceOkta(ctx context.Context, request operations.GetSourceOktaRequest) (*operations.GetSourceOktaResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Omnisend", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Okta", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -30189,7 +29722,7 @@ func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetS contentType := httpRes.Header.Get("Content-Type") - res := &operations.GetSourceOmnisendResponse{ + res := &operations.GetSourceOktaResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -30213,10 +29746,10 @@ func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetS return res, nil } -// GetSourceOnesignal - Get Source details -func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.GetSourceOnesignalRequest) (*operations.GetSourceOnesignalResponse, error) { +// GetSourceOmnisend - Get Source details +func (s *sources) GetSourceOmnisend(ctx context.Context, request operations.GetSourceOmnisendRequest) (*operations.GetSourceOmnisendResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Onesignal", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Omnisend", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", err) } @@ -30247,7 +29780,7 @@ func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.Get contentType := httpRes.Header.Get("Content-Type") - res := &operations.GetSourceOnesignalResponse{ + res := &operations.GetSourceOmnisendResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -30271,10 +29804,10 @@ func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.Get return res, nil } -// GetSourceOpenweather - Get Source details -func (s *sources) GetSourceOpenweather(ctx context.Context, request operations.GetSourceOpenweatherRequest) (*operations.GetSourceOpenweatherResponse, error) { +// GetSourceOnesignal - Get Source details +func (s *sources) GetSourceOnesignal(ctx context.Context, request operations.GetSourceOnesignalRequest) (*operations.GetSourceOnesignalResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Openweather", request, nil) + url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Onesignal", request, nil) if err != nil { return nil, fmt.Errorf("error generating URL: %w", 
err) } @@ -30305,7 +29838,7 @@ func (s *sources) GetSourceOpenweather(ctx context.Context, request operations.G contentType := httpRes.Header.Get("Content-Type") - res := &operations.GetSourceOpenweatherResponse{ + res := &operations.GetSourceOnesignalResponse{ StatusCode: httpRes.StatusCode, ContentType: contentType, RawResponse: httpRes, @@ -31431,64 +30964,6 @@ func (s *sources) GetSourcePrestashop(ctx context.Context, request operations.Ge return res, nil } -// GetSourcePublicApis - Get Source details -func (s *sources) GetSourcePublicApis(ctx context.Context, request operations.GetSourcePublicApisRequest) (*operations.GetSourcePublicApisResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PublicApis", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - req, err := http.NewRequestWithContext(ctx, "GET", url, nil) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "application/json") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.GetSourcePublicApisResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode == 200: - switch { - case utils.MatchContentType(contentType, `application/json`): - var out *shared.SourceResponse - if err := utils.UnmarshalJsonFromResponseBody(bytes.NewBuffer(rawBody), &out); err != nil { - return res, err - } - - res.SourceResponse = out - } - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // GetSourcePunkAPI - Get Source details func (s *sources) GetSourcePunkAPI(ctx context.Context, request operations.GetSourcePunkAPIRequest) (*operations.GetSourcePunkAPIResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -37825,67 +37300,6 @@ func (s *sources) PutSourceConvex(ctx context.Context, request operations.PutSou return res, nil } -// PutSourceDatadog - Update a Source fully -func (s *sources) PutSourceDatadog(ctx context.Context, request operations.PutSourceDatadogRequest) (*operations.PutSourceDatadogResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Datadog", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceDatadogPutRequest", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := 
http.NewRequestWithContext(ctx, "PUT", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "*/*") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.PutSourceDatadogResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: - fallthrough - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // PutSourceDatascope - Update a Source fully func (s *sources) PutSourceDatascope(ctx context.Context, request operations.PutSourceDatascopeRequest) (*operations.PutSourceDatascopeResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -42583,67 +41997,6 @@ func (s *sources) PutSourceOnesignal(ctx context.Context, request operations.Put return res, nil } -// PutSourceOpenweather - Update a Source fully -func (s *sources) PutSourceOpenweather(ctx context.Context, request operations.PutSourceOpenweatherRequest) (*operations.PutSourceOpenweatherResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#Openweather", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourceOpenweatherPutRequest", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "*/*") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := 
&operations.PutSourceOpenweatherResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: - fallthrough - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // PutSourceOracle - Update a Source fully func (s *sources) PutSourceOracle(ctx context.Context, request operations.PutSourceOracleRequest) (*operations.PutSourceOracleResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) @@ -43803,67 +43156,6 @@ func (s *sources) PutSourcePrestashop(ctx context.Context, request operations.Pu return res, nil } -// PutSourcePublicApis - Update a Source fully -func (s *sources) PutSourcePublicApis(ctx context.Context, request operations.PutSourcePublicApisRequest) (*operations.PutSourcePublicApisResponse, error) { - baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails()) - url, err := utils.GenerateURL(ctx, baseURL, "/sources/{sourceId}#PublicApis", request, nil) - if err != nil { - return nil, fmt.Errorf("error generating URL: %w", err) - } - - bodyReader, reqContentType, err := utils.SerializeRequestBody(ctx, request, "SourcePublicApisPutRequest", "json") - if err != nil { - return nil, fmt.Errorf("error serializing request body: %w", err) - } - - debugBody := bytes.NewBuffer([]byte{}) - debugReader := io.TeeReader(bodyReader, debugBody) - - req, err := http.NewRequestWithContext(ctx, "PUT", url, debugReader) - if err != nil { - return nil, fmt.Errorf("error creating request: %w", err) - } - req.Header.Set("Accept", "*/*") - req.Header.Set("user-agent", fmt.Sprintf("speakeasy-sdk/%s %s %s %s", s.sdkConfiguration.Language, s.sdkConfiguration.SDKVersion, s.sdkConfiguration.GenVersion, s.sdkConfiguration.OpenAPIDocVersion)) - - req.Header.Set("Content-Type", reqContentType) - - client := s.sdkConfiguration.SecurityClient - - httpRes, err := client.Do(req) - if err != nil { - return nil, fmt.Errorf("error sending request: %w", err) - } - if httpRes == nil { - return nil, fmt.Errorf("error sending request: no response") - } - - rawBody, err := io.ReadAll(httpRes.Body) - if err != nil { - return nil, fmt.Errorf("error reading response body: %w", err) - } - httpRes.Request.Body = io.NopCloser(debugBody) - httpRes.Body.Close() - httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody)) - - contentType := httpRes.Header.Get("Content-Type") - - res := &operations.PutSourcePublicApisResponse{ - StatusCode: httpRes.StatusCode, - ContentType: contentType, - RawResponse: httpRes, - } - switch { - case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300: - fallthrough - case httpRes.StatusCode == 403: - fallthrough - case httpRes.StatusCode == 404: - } - - return res, nil -} - // PutSourcePunkAPI - Update a Source fully func (s *sources) PutSourcePunkAPI(ctx context.Context, request operations.PutSourcePunkAPIRequest) (*operations.PutSourcePunkAPIResponse, error) { baseURL := utils.ReplaceParameters(s.sdkConfiguration.GetServerDetails())
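Unlike the Create and Get operations, the Delete and Put methods in the hunks above return no decoded payload: they buffer the body, restore it on the response, and simply bucket the status code into 2xx, 403, or 404. The following stand-alone sketch mirrors that flow; the URL is a placeholder and the call is plain net/http rather than the generated client.

// Sketch of the no-payload status classification used by the Delete/Put methods.
package main

import (
	"bytes"
	"context"
	"fmt"
	"io"
	"net/http"
)

func main() {
	req, err := http.NewRequestWithContext(context.Background(), http.MethodDelete, "https://example.com/sources/placeholder-id", nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Accept", "*/*")

	httpRes, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}

	// Buffer and restore the body so the raw response remains readable.
	rawBody, err := io.ReadAll(httpRes.Body)
	if err != nil {
		panic(err)
	}
	httpRes.Body.Close()
	httpRes.Body = io.NopCloser(bytes.NewBuffer(rawBody))

	switch {
	case httpRes.StatusCode >= 200 && httpRes.StatusCode < 300:
		fmt.Println("source deleted")
	case httpRes.StatusCode == http.StatusForbidden, httpRes.StatusCode == http.StatusNotFound:
		fmt.Println("request rejected:", httpRes.StatusCode)
	default:
		fmt.Println("unexpected status:", httpRes.StatusCode)
	}
}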